rm deltaScripts/archived

parent ce7147c76c
commit 2f3b2e2d2f

99 changed files with 0 additions and 6195 deletions
@@ -1,28 +0,0 @@
#!/bin/bash
# This script will add an index to the warning table for officeid/phensig.
#
# This needs to be performed with build 11.9.0-1.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="CREATE INDEX warning_query_index on warning using btree (officeid, phensig);"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,28 +0,0 @@
#!/bin/bash
# This script will remove all data from the bufrua table.
#
# This needs to be performed with build 11.9.0-1.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="delete from bufrua;"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,29 +0,0 @@
#!/bin/bash
# This script will change the name of the history column in the gfe_gridhistory
# table (metadata) to parent.
#
# This needs to be performed with build 11.9.0-1.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="ALTER TABLE gfe_gridhistory RENAME COLUMN history TO parent;"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,36 +0,0 @@
#!/bin/bash
# This script updates xxxid entries in the stdtextproducts table.
# It takes xxxid entries that are shorter than 3 characters and pads
# them to 3 by adding trailing spaces.
#
# This needs to be performed with build 11.9.0-1 where all xxxid
# entries that are inserted and queried for must be 3 characters.
#
# Set up
# 1) Perform this task on the machine with the edex database
# 2) create a directory where you have read write permissions
# 3) cd to the directory
# 4) Copy this file (update_stdtextproducts.sh) to this directory
# 5) do: chmod +x update_stdtextproducts.sh
# 6) ./update_stdtextproducts.sh

export DEFAULT_HOST=${DEFAULT_HOST:-localhost}
psql=/awips2/psql/bin/psql
dq='"'
selCmd="select distinct xxxid from stdtextproducts where length(xxxid) = 2 ;"

rm -f ./tmp.sql
${psql} -h ${DEFAULT_HOST} -U awips -d fxatext -c "${selCmd}" | \
    sed -n -e '/^ \(..\)$/s//UPDATE stdtextproducts set xxxid="\1 " where xxxid="\1";/p' \
        -e '/^ \(.\)$/s//UPDATE stdtextproducts set xxxid="\1 " where xxxid="\1";/p' \
        -e '/^$/s//UPDATE stdtextproducts set xxxid=" " where length(xxxid) = 0 ;/p' | \
    sed -e"s/$dq/'/g" > ./tmp.sql

cat ./tmp.sql

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${psql} -h ${DEFAULT_HOST} -U awips -d fxatext < ./tmp.sql
rm -f ./tmp.sql

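For illustration, assuming a hypothetical two-character xxxid of "AB", the query/sed pipeline above would emit a padding statement like this into tmp.sql (the final sed converts the double quotes to single quotes):

    UPDATE stdtextproducts set xxxid='AB ' where xxxid='AB';
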
@@ -1,35 +0,0 @@
-- Index: convsigmet_section_parentid_idex

-- DROP INDEX convsigmet_section_parentid_idex;

CREATE INDEX convsigmet_section_parentid_idex
    ON convsigmet_section
    USING btree
    (parentid);

-- Index: convsigmet_location_parentid_idex

-- DROP INDEX convsigmet_location_parentid_idex;

CREATE INDEX convsigmet_location_parentid_idex
    ON convsigmet_location
    USING btree
    (parentid);

-- Index: airmet_report_parentid_idex

-- DROP INDEX airmet_report_parentid_idex;

CREATE INDEX airmet_report_parentid_idex
    ON airmet_report
    USING btree
    (parentid);

-- Index: airmet_location_parentid_idex

-- DROP INDEX airmet_location_parentid_idex;

CREATE INDEX airmet_location_parentid_idex
    ON airmet_location
    USING btree
    (parentid);

@@ -1,28 +0,0 @@
#!/bin/bash
# This script will add an index to the sfcobs table.
#
# This needs to be performed with build 11.9.0-3.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="CREATE INDEX reporttype_index ON sfcobs USING btree (reporttype);"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,28 +0,0 @@
#!/bin/bash
# This script will drop the warning tables so they will be recreated.
#
# This needs to be performed with build 11.9.0-3.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="DROP TABLE warning, warning_ugczone CASCADE; DELETE FROM plugin_info WHERE name='warning'; DROP TABLE IF EXISTS practicewarning CASCADE;"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,34 +0,0 @@
#!/bin/bash
# This script will drop the fog table and remove the fog hdf5 files.
#
# This update needs to be performed with build 11.9.0-3.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="DROP TABLE IF EXISTS fog; UPDATE plugin_info SET initialized='false' WHERE name='fog';"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: Dropping the metadata fog table."
${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: Purging fog hdf5 files."
if [ -d /awips2/edex/data/hdf5/fog ]; then
    rm -rfv /awips2/edex/data/hdf5/fog
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,30 +0,0 @@
#!/bin/bash
# This script will set the initialization flag for gfe to FALSE in plugin_info.
# The next time edex starts, an additional table will be created for gfe.
#
# This update needs to be performed with build 11.9.0-4.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="UPDATE plugin_info SET initialized='false' WHERE name='com.raytheon.edex.plugin.gfe'"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: Updating plugin_info."
${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,22 +0,0 @@
#!/bin/bash
# Script to remove sampleId tags from sample sets
if [ -d /awips2/edex/data/utility/common_static ]; then
    if [ "$1" == "removeBak" ]; then
        for bak in `ls -1 /awips2/edex/data/utility/common_static/*/*/gfe/sampleSets/*.xml.bk$`; do
            echo "Removing $bak"
            rm $bak
        done
    else
        for ss in `ls -1 /awips2/edex/data/utility/common_static/*/*/gfe/sampleSets/*.xml`; do
            echo "Editing $ss..."
            sed -i.bk$ -e 's/<sampleId.*\/>//' $ss
        done
        echo
        echo " Update complete. Please verify your sample sets are still loading correctly."
        echo " If you find an issue you can restore your previous version from the file with the .bk$ extension"
        echo " and manually remove the <sampleId .../> tag."
        echo " Once you have verified all your sample sets are OK, rerun this script with removeBak to remove the .bk$ files."
        echo " Example:"
        echo "     $0 removeBak"
    fi
fi

@@ -1,30 +0,0 @@
#!/bin/bash

PSQL="/awips2/psql/bin/psql"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "Removing grid data in database"
${PSQL} -d metadata -U awips -c "truncate table grib, grib_models, gridcoverage"

if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Removing grid hdf5 data"
rm -rf /awips2/edex/data/hdf5/grib

echo ""
echo "INFO: The update was successfully applied."

exit 0

@@ -1,29 +0,0 @@
#!/bin/bash
# This script will drop the purge table in the metadata database.
#
# This update needs to be performed with build 11.9.0-6.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="DROP TABLE awips.purge_rules;"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The psql executable does not exist."
    echo "FATAL: Update failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: Dropping the awips.purge_rules table."
${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,24 +0,0 @@
#!/bin/bash
# This script will remove the gfe vcmodule directory.
#
# This update needs to be performed with build 11.9.0-6.
#

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

if [ -d /awips2/edex/data/utility/common_static/base/gfe/vcmodule ]; then
    echo "INFO: removing /awips2/edex/data/utility/common_static/base/gfe/vcmodule."

    rm -rfv /awips2/edex/data/utility/common_static/base/gfe/vcmodule/*
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to remove the contents of /awips2/edex/data/utility/common_static/base/gfe/vcmodule."
        echo "FATAL: Update failed!"
        exit 1
    fi
else
    echo "INFO: No updates to perform."
fi

exit 0

@@ -1,49 +0,0 @@
#!/bin/bash
# This script will remove the existing purge configuration files
# from base. The existing site-specific purge configuration files
# will be copied from edex_static to common_static.
#
# This update needs to be performed with build 11.9.0-6.
#

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: removing /awips2/edex/data/utility/edex_static/base/purge."

if [ -d /awips2/edex/data/utility/edex_static/base/purge ]; then
    rm -rfv /awips2/edex/data/utility/edex_static/base/purge
    if [ $? -ne 0 ]; then
        echo "FATAL: Update failed!"
        exit 1
    fi
fi

echo "INFO: copying site-specific purge rules to common_static."
if [ -d /awips2/edex/data/utility/edex_static/site ]; then
    cd /awips2/edex/data/utility/edex_static/site
    for site in `ls -1d *`;
    do
        if [ -d ${site}/purge ]; then
            if [ ! -d /awips2/edex/data/utility/common_static/site/${site} ]; then
                mkdir -p /awips2/edex/data/utility/common_static/site/${site}
                if [ $? -ne 0 ]; then
                    echo "ERROR: Failed to create /awips2/edex/data/utility/common_static/site/${site}."
                    echo "FATAL: Update failed!"
                    exit 1
                fi
            fi
            mv -v ${site}/purge /awips2/edex/data/utility/common_static/site/${site}
            if [ $? -ne 0 ]; then
                echo "ERROR: Failed to move ${site}/purge to /awips2/edex/data/utility/common_static/site/${site}."
                echo "FATAL: Update failed!"
                exit 1
            fi
        fi
    done
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,24 +0,0 @@
#!/bin/bash
# This script will remove any existing log files from
# /awips2/GFESuite/logs.
#
# This update needs to be performed with build 11.9.0-7.
# This update is for both the database and the processing
# servers.

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

if [ -d /awips2/GFESuite/logs ]; then
    echo "INFO: Removing GFESuite Logs."
    rm -rf /awips2/GFESuite/logs/*
    if [ $? -ne 0 ]; then
        echo "ERROR: Unable to remove the contents of /awips2/GFESuite/logs."
        echo "FATAL: Update Failed!"
        exit 1
    fi
fi

echo "INFO: The update finished successfully."
exit 0

@@ -1,59 +0,0 @@
#!/bin/bash
# This script will remove Canadian information from the grib and grib_models tables; the associated hdf5
# files will also be removed. This script was created for DR #11773.
#
# This update needs to be performed with build 11.9.0-7.
#

PSQL="/awips2/psql/bin/psql"
SQL_DELETE_CMD1="DELETE FROM awips.grib WHERE modelinfo_id IN (SELECT id FROM awips.grib_models WHERE modelname IN('Canadian-NH','Canadian-Reg'));"
SQL_DELETE_CMD2="DELETE from awips.grib_models WHERE modelname IN('Canadian-NH','Canadian-Reg');"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The psql executable does not exist."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: Removing Canadian records from awips.grib."
${PSQL} -U awips -d metadata -c "${SQL_DELETE_CMD1}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi
echo "INFO: Removing Canadian records from awips.grib_models."
${PSQL} -U awips -d metadata -c "${SQL_DELETE_CMD2}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "INFO: Deleting Canadian HDF5 Grib files."
if [ -d /awips2/edex/data/hdf5/grib ]; then
    if [ -d /awips2/edex/data/hdf5/grib/Canadian-NH ]; then
        echo "INFO: Removing '/awips2/edex/data/hdf5/grib/Canadian-NH'."
        rm -rf /awips2/edex/data/hdf5/grib/Canadian-NH
        if [ $? -ne 0 ]; then
            echo "ERROR: Failed to remove '/awips2/edex/data/hdf5/grib/Canadian-NH'."
            echo "FATAL: Update failed."
            exit 1
        fi
    fi
    if [ -d /awips2/edex/data/hdf5/grib/Canadian-Reg ]; then
        echo "INFO: Removing '/awips2/edex/data/hdf5/grib/Canadian-Reg'."
        rm -rf /awips2/edex/data/hdf5/grib/Canadian-Reg
        if [ $? -ne 0 ]; then
            echo "ERROR: Failed to remove '/awips2/edex/data/hdf5/grib/Canadian-Reg'."
            echo "FATAL: Update failed."
            exit 1
        fi
    fi
fi

echo "INFO: The update was successful."
exit 0

@@ -1,63 +0,0 @@
#!/bin/bash
# This script will add two new entries to the satellite_creating_entities
# table in the metadata database.
#
# This update needs to be performed with build 11.9.0-7.
#

PSQL="/awips2/psql/bin/psql"
SQL_SELECT1="SELECT COUNT(*) FROM awips.satellite_creating_entities WHERE entityid = 17;"
SQL_INSERT_CMD1="INSERT INTO awips.satellite_creating_entities VALUES (17,'GOES-14(O)');"
SQL_SELECT2="SELECT COUNT(*) FROM awips.satellite_creating_entities WHERE entityid = 18;"
SQL_INSERT_CMD2="INSERT INTO awips.satellite_creating_entities VALUES (18,'GOES-15(P)');"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The psql executable does not exist."
    echo "FATAL: Update failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: Determining if Record '1' needs to be inserted: (17,'GOES-14(O)')."
RCOUNT=`${PSQL} -U awips -d metadata --no-align --field-separator ',' \
    --pset footer --tuples-only -c "${SQL_SELECT1}"`
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi
if [ ${RCOUNT} -eq 0 ]; then
    echo "INFO: Inserting first additional record: entityid = 17."

    ${PSQL} -U awips -d metadata -c "${SQL_INSERT_CMD1}"
    if [ $? -ne 0 ]; then
        echo "FATAL: Update Failed!"
        exit 1
    fi
else
    echo "INFO: A record with entityid=17 is already present."
fi

echo "INFO: Determining if Record '2' needs to be inserted: (18,'GOES-15(P)')."
RCOUNT=`${PSQL} -U awips -d metadata --no-align --field-separator ',' \
    --pset footer --tuples-only -c "${SQL_SELECT2}"`
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi
if [ ${RCOUNT} -eq 0 ]; then
    echo "INFO: Inserting second additional record: entityid = 18."

    ${PSQL} -U awips -d metadata -c "${SQL_INSERT_CMD2}"
    if [ $? -ne 0 ]; then
        echo "FATAL: Update Failed!"
        exit 1
    fi
else
    echo "INFO: A record with entityid=18 is already present."
fi

echo "INFO: The update was successful."
exit 0

@@ -1,42 +0,0 @@
#!/bin/bash
# This script will add a new entry to the satellite_geostationary_positions table
# in the metadata database.
#
# This update needs to be performed with build 11.9.0-7.
#

PSQL="/awips2/psql/bin/psql"
SQL_SELECT1="SELECT COUNT(*) FROM awips.satellite_geostationary_positions WHERE satelliteName = 'GOES-15(P)';"
SQL_INSERT_CMD1="INSERT INTO awips.satellite_geostationary_positions (satelliteName,height,latitude,longitude) VALUES ('GOES-15(P)',35794,0,-135);"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The psql executable does not exist."
    echo "FATAL: Update failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

echo "INFO: Determining if Record '1' needs to be inserted: ('GOES-15(P)',35794,0,-135)."
RCOUNT=`${PSQL} -U awips -d metadata --no-align --field-separator ', ' \
    --pset footer --tuples-only -c "${SQL_SELECT1}"`
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi
if [ ${RCOUNT} -eq 0 ]; then
    echo "INFO: Inserting first additional record: satelliteName = 'GOES-15(P)'."

    ${PSQL} -U awips -d metadata -c "${SQL_INSERT_CMD1}"
    if [ $? -ne 0 ]; then
        echo "FATAL: Update Failed!"
        exit 1
    fi
else
    echo "INFO: A record with satelliteName='GOES-15(P)' is already present."
fi

echo "INFO: The update was successful."
exit 0

@@ -1,41 +0,0 @@
#! /bin/sh
# This script updates afosid entries in the afos_to_awips table.
# It takes afosid entries that are 6, 7, or 8 characters and pads them to 9.
#
# This needs to be performed with build 11.9.0-7 where all afosid
# entries that are inserted and queried for must be 9 characters and
# wmottaaii must be 6 and wmoccc must be 4.
#
# Alternate method. If you have the afos2awips.txt used to first generate the
# afos_to_awips table, it can be copied to /awips2/edex/data/ndm and when
# ingested the table will be corrected.
#
# Set up
# 1) Perform this task on the machine with the edex database
# 2) create a directory where you have read write permissions
# 3) cd to the directory
# 4) Copy this file (update_afos_to_awips.sh) to this directory
# 5) do: chmod +x update_afos_to_awips.sh
# 6) ./update_afos_to_awips.sh

export DEFAULT_HOST=${DEFAULT_HOST:-localhost}
psql=/awips2/psql/bin/psql
dq='"'

rm -f ./tmp.sql
touch ./tmp.sql

selCmd="select distinct afosid from afos_to_awips where length(afosid) = 8 or length(afosid) = 7 or length(afosid) = 6 ;"
${psql} -h ${DEFAULT_HOST} -U awips -d fxatext -c "${selCmd}" | \
    sed -n -e '/^ \([^ ][^ ][^ ][^ ][^ ][^ ][^ ][^ ]\) *$/s//UPDATE afos_to_awips set afosid="\1 " where afosid="\1";/p' \
        -e '/^ \([^ ][^ ][^ ][^ ][^ ][^ ][^ ]\) *$/s//UPDATE afos_to_awips set afosid="\1 " where afosid="\1";/p' \
        -e '/^ \([^ ][^ ][^ ][^ ][^ ][^ ]\) *$/s//UPDATE afos_to_awips set afosid="\1 " where afosid="\1";/p' | \
    sed -e"s/$dq/'/g" >> ./tmp.sql

cat ./tmp.sql

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${psql} -h ${DEFAULT_HOST} -U awips -d fxatext < ./tmp.sql

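As a sketch, assuming a hypothetical eight-character afosid of "CCCNNNXX", the pipeline above appends a padding statement like the following to tmp.sql:

    UPDATE afos_to_awips set afosid='CCCNNNXX ' where afosid='CCCNNNXX';
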
@@ -1,187 +0,0 @@
#!/usr/bin/env python
# Issue #436
# Update script to move and modify MakeHazardConfig.py for local effects based hazards work assignment

import os, re, string, sys

configStartPattern = """###########################################################
############## ###############
############## CONFIGURATION SECTION ###############
############## ###############
"""

configEndPattern = "#################### END CONFIGURATION SECTION #################"

localEffectsAdditions = """# Dictionary mapping Hazard Types to applicable local effect areas
# that can be intersected with the zone edit areas.
# You should not define localEffectAreas entries for Tropical Cyclone
# or Convective Watches.
localEffectAreas = {}

#localEffectAreas = {
#  'Winter Weather' : ["Below_1000","Below_1500","Below_2000","Below_2500","Below_3000","Below_3500","Below_4000",
#                      "Above_1000","Above_1500","Above_2000","Above_2500","Above_3000","Above_3500"],
#  }

# Dictionary associating local Effect Area names with a corresponding
# segment number, display name, and list of zones to be auto-selected
# If you do not wish to auto-select zones you should supply an empty list
#
# The display name allows you to display a "pretty" string in the UI rather
# than the edit area name. If the display name is empty ("") the edit area
# name will be used.
localAreaData = {}

#localAreaData = {
#  "Below_1000" : ( 999, "", []),
#  "Below_1500" : (1499, "", []),
#  "Below_2000" : (1999, "", []),
#  "Below_2500" : (2499, "", []),
#  "Below_3000" : (2999, "", []),
#  "Below_3500" : (3499, "", []),
#  "Below_4000" : (3999, "", []),
#  "Above_1000" : (1000, "", []),
#  "Above_1500" : (1500, "", []),
#  "Above_2000" : (2000, "", []),
#  "Above_2500" : (2500, "", []),
#  "Above_3000" : (3000, "", []),
#  "Above_3500" : (3500, "", []),
#  }
"""

hazardDictPattern = re.compile("hazardDict\s*=\s*\{(.*?)}", re.DOTALL)
entryPattern = re.compile("('.*?')\s*:\s*(\[.*?\])", re.DOTALL)
gumPattern = re.compile('\nsiteID = DataManager.getCurrentInstance\(\).getSiteID\(\)\nif siteID == "GUM":\n(.*?)\nelse:\n(.*?\n)\n', re.DOTALL)
sitePattern = re.compile("/awips2/edex/data/utility/cave_static/site/(.*?)/")
leadingSpacePattern = re.compile("(\s*?)\S")
commentPattern = re.compile("^", re.MULTILINE)

orderdDictImport = "\n\nfrom collections import OrderedDict\n"
gumComment = "\n# for GUM use comment out the above definition and uncomment the one below\n\n"

fixed = []
status = 0

def fixEntry(m):
    newEntry = "(" + m.string[m.start(1):m.end(1)] + ", " + m.string[m.start(2):m.end(2)] + ")"
    return newEntry

def fixHazardDict(m):
    newDict = "hazardDict = OrderedDict([" + m.string[m.start(1):m.end(1)] + "])"
    newDict = re.sub(entryPattern, fixEntry, newDict)
    return newDict

def unindent(s):
    m = re.match(leadingSpacePattern, s)
    if m is not None:
        p = re.compile("^" + m.string[m.start(1):m.end(1)], re.MULTILINE)
        s = re.sub(p, "", s)
    return s

def comment(s):
    s = re.sub(commentPattern, "#", s)
    return s

def processDir(arg, srcDir, names):
    global fixed, status
    if arg in names:
        src = os.path.join(srcDir, arg)

        # skip if already fixed
        if src in fixed:
            return

        print
        if "userPython/procedures" in srcDir:
            destDir = string.replace(srcDir, "userPython/procedures", "userPython/utilities")
            if not os.path.exists(destDir):
                os.makedirs(destDir)
            dest = os.path.join(destDir, arg)
            if os.path.exists(dest):
                print "MakeHazardConfig.py exists in both\n\t"+srcDir+" and\n\t"+destDir+"\nPlease update this file manually"
                status = 1
                return
            else:
                print "moving", src, "to utilities"
                try:
                    os.rename(src, dest)
                    src = dest
                except:
                    print "Error moving file\nPlease update this file manually"
                    status = 1
                    return

        try:
            f = open(src, 'r')
            contents = f.read()
        except:
            print "Error reading file: ", src
            print sys.exc_info()[0]
            print "Please update this file manually"
            status = 1
            return
        finally:
            try:
                f.close()
            except:
                pass # if we read the file successfully don't care about close

        # skip if doesn't contain the old hazardDict pattern
        if re.search(hazardDictPattern, contents) is None:
            print src, "cannot be automatically updated.\nPlease manually update this file if required"
            status = 1
            return

        print "updating", src
        m = re.search(gumPattern, contents)
        if m is not None:
            # determine site ID
            m1 = re.match(sitePattern, srcDir)
            siteId = srcDir[m1.start(1):m1.end(1)]

            gumBlock = unindent(contents[m.start(1):m.end(1)])
            regBlock = unindent(contents[m.start(2):m.end(2)])
            if siteId == "GUM":
                contents = contents[:m.start(0)] + orderdDictImport + comment(regBlock) + gumComment + gumBlock + "\n" + contents[m.end(0):]
            else:
                contents = contents[:m.start(0)] + orderdDictImport + regBlock + gumComment + comment(gumBlock) + "\n" + contents[m.end(0):]

        newContents = re.sub(hazardDictPattern, fixHazardDict, contents)
        newContents = re.sub(configStartPattern, "", newContents)
        newContents = re.sub(configEndPattern, "", newContents)
        newContents += localEffectsAdditions

        try:
            back = src + ".BAK"
            os.rename(src, back)
        except:
            print "Unable to create backup file, Please update this file manually"
            status = 1
            return

        try:
            f = open(src, 'w')
            f.write(newContents)
        except:
            print "Error writing updated file:"
            print sys.exc_info()[0]
            print "Please restore from", back, "and update manually"
            status = 1
        finally:
            try:
                f.close()
                fixed.append(src)
            except:
                print "Error closing updated file:"
                print sys.exc_info()[0]
                print "Please restore from", back, "and update manually"
                status = 1

def main():
    os.path.walk("/awips2/edex/data/utility/cave_static/site", processDir, "MakeHazardConfig.py")
    if status != 0:
        print "1 or more errors were encountered. Please review the log carefully."
    return status

if __name__ == "__main__":
    sys.exit(main())

@@ -1,73 +0,0 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

import glob
import logging
import os
import os.path
import sys


logger = None

def main():
    __initLogger()

    logger.info("Starting upgrade script for DR #1117...")
    logger.info("All information will also be written to removeObeGribParamInfoFiles.log")

    if not os.path.isdir('/awips2/edex/data/utility/edex_static/site/'):
        logger.error("This script must be run on the EDEX server. Exiting.")
        sys.exit(-1)

    obsoleteFiles = glob.glob('/awips2/edex/data/utility/edex_static/site/*/grib/parameterInfo/*.xml')

    logger.info("Deleting obsolete grib parameter info files.")
    for file in obsoleteFiles:
        try:
            os.remove(file)
        except (OSError, IOError):
            logger.exception("Could not delete file [" + file + "]. Please manually delete this file after the script has exited.")

    logger.info("Upgrade script complete.")
    logger.info("The following files will need to be regenerated before restarting EDEX: " + str(obsoleteFiles))


def __initLogger():
    global logger
    logger = logging.getLogger("removeObeGribParamInfoFiles")
    logger.setLevel(logging.DEBUG)

    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s", "%H:%M:%S")
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    ch = logging.FileHandler('removeObeGribParamInfoFiles.log')
    ch.setLevel(logging.INFO)
    formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s", "%H:%M:%S")
    ch.setFormatter(formatter)
    logger.addHandler(ch)


if __name__ == '__main__':
    main()

@@ -1,193 +0,0 @@
#!/bin/sh

# NOTE: This upgrade script is simple. An XML purge rule cannot be condensed
# with multiple tags on one line; the script expects pretty-printed XML that
# is easily readable/parsable.

SITE_PATH="/awips2/edex/data/utility/common_static/site"

if [ ! -d $SITE_PATH ]; then
    echo "Cannot find site directory. $SITE_PATH does not exist"
    exit 1
fi

echo "This script should be run as user awips. Will back up purge rule directories to .bak"
echo "Press any key to continue or ^C to quit"
read

for d in `find $SITE_PATH -type d -name "purge"`; do
    newDir=$d.new
    if [ -d "$newDir" ]; then
        rm -rf $newDir
    fi
    mkdir -p $newDir

    echo "Processing directory $d"

    for f in `ls $d/*PurgeRules.xml`; do
        fileName=${f##*/}
        NEW_FILE=$newDir/${fileName}
        if [ -f $f.modelPathKey ]; then
            f="$f.modelPathKey"
        elif [ -f $f.radarPathKey ]; then
            f="$f.radarPathKey"
        fi
        echo "  Processing $f into $NEW_FILE"

        PIFS=$IFS
        IFS=$'\n'

        # prescan to determine all key combinations
        hasDefaultRule='0'
        keys=()
        keysIndex=0

        for key in `grep key $f `; do
            # strip tags
            key=${key#*<key>}
            key=${key%</key>*}

            if [ "$key" == "default" ]; then
                hasDefaultRule='1'
            else
                # split on semicolon and equal
                keyTokens=( $( echo "$key" | tr ';=' '\n' ) )
                size="${#keyTokens[@]}"
                tokenIndex=0

                while [ "$tokenIndex" -lt "$size" ]; do
                    curToken="${keyTokens[$tokenIndex]}"
                    addKey=1

                    # verify key hasn't already been added
                    if [ $keysIndex -gt 0 ]; then
                        tempIndex=0
                        while [ $tempIndex -lt $keysIndex ]; do
                            if [ "${keys[$tempIndex]}" == "$curToken" ]; then
                                addKey=0
                                break
                            fi
                            let tempIndex+=1
                        done
                    fi

                    if [ $addKey -eq 1 ]; then
                        keys[$keysIndex]="$curToken"
                        let keysIndex+=1
                    fi

                    let tokenIndex+=2
                done
            fi
        done

        keysSize=$keysIndex
        keysIndex=0
        state='0'
        rule=()
        ruleIndex=0
        ruleTag="rule"

        for line in `cat $f`; do
            case $state in
            # looking for <purgeRuleSet>
            0) if [[ $line =~ "<purgeRuleSet .*>" ]]; then
                   # drop name space
                   echo "<purgeRuleSet>" >> $NEW_FILE
                   keyIndex=0
                   while [ $keyIndex -lt $keysSize ]; do
                       echo "   <key>${keys[$keyIndex]}</key>" >> $NEW_FILE
                       let keyIndex+=1
                   done
                   state='1'
               else
                   # copy line to new file
                   echo $line >> $NEW_FILE
               fi
               ;;
            # looking for <rule>
            1) if [[ $line =~ "<rule>" ]]; then
                   state='2'
               else
                   # copy line to new file
                   echo $line >> $NEW_FILE
               fi
               ;;
            # looking for <key>
            2) if [[ $line =~ "<key>.*</key>" ]]; then
                   state='3'

                   # strip tags
                   key=${line#*<key>}
                   key=${key%</key>*}

                   if [ "$key" == "default" ]; then
                       # default rule, nothing to do besides set rule tag
                       ruleTag="defaultRule"
                   else
                       # normal rule, split into tokens, and order by keys
                       ruleTag="rule"

                       # split on semicolon and equal
                       keyTokens=( $( echo "$key" | tr ';=' '\n' ) )
                       tokenSize="${#keyTokens[@]}"
                       keyIndex=0

                       while [ $keyIndex -lt $keysSize ]; do
                           curKey="${keys[$keyIndex]}"
                           tokenIndex=0

                           while [ $tokenIndex -lt $tokenSize ]; do
                               if [ "$curKey" == "${keyTokens[$tokenIndex]}" ]; then
                                   # found key, add value tag to rule
                                   let tokenIndex+=1
                                   rule[$ruleIndex]="   <keyValue>${keyTokens[$tokenIndex]}</keyValue>"
                                   let ruleIndex+=1
                                   break
                               else
                                   # advance tokenIndex to next key
                                   let tokenIndex+=2
                               fi
                           done

                           let keyIndex+=1
                       done
                   fi
               elif [[ ! $line =~ "</?id>" ]] && [[ ! $line =~ "<pluginName>" ]]; then
                   # copy line to rule buffer, skipping <id> and <pluginName>
                   rule[$ruleIndex]="$line"
                   let ruleIndex+=1
               fi
               ;;
            # looking for </rule>
            3) if [[ $line =~ "</rule>" ]]; then
                   state='1'
                   ruleSize=$ruleIndex
                   ruleIndex=0

                   echo "   <$ruleTag>" >> $NEW_FILE
                   while [ $ruleIndex -lt $ruleSize ]; do
                       echo "${rule[$ruleIndex]}" >> $NEW_FILE
                       let ruleIndex+=1
                   done
                   echo "   </$ruleTag>" >> $NEW_FILE

                   ruleIndex=0
                   rule=()
               elif [[ ! $line =~ "</?id>" ]] && [[ ! $line =~ "<pluginName>" ]]; then
                   # copy line to rule buffer
                   rule[$ruleIndex]="$line"
                   let ruleIndex+=1
               fi
               ;;
            esac
        done

        IFS=$PIFS
    done

    echo "  Moving $d to $d.bak"
    mv $d $d.bak
    echo "  Moving $newDir to $d"
    mv $newDir $d
done

@@ -1,28 +0,0 @@
#!/bin/bash
# This script will add an index to the gfelocktable table.
#
# This needs to be performed with build 12.5.1.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="CREATE INDEX lock_parmid_idx ON gfelocktable USING btree (parmid);"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,28 +0,0 @@
#!/bin/bash
# This script will drop some GFE tables so they will be recreated.
#
# This needs to be performed with build 12.5.1.
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="DROP TABLE IF EXISTS smartinit, gfe_iscsend CASCADE; DELETE FROM plugin_info WHERE name='com.raytheon.edex.plugin.gfe';"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0

@@ -1,39 +0,0 @@
#!/bin/bash
# This script will remove the existing monitoringArea files from
# the fog, safeseas, and snow site directories.
#
# This update needs to be performed with build 12.6.1.
#

echo ""
echo "Press Enter to perform the updates; Ctrl-C to quit."
read done

if [ -d /awips2/edex/data/utility/common_static/site ]; then
    cd /awips2/edex/data/utility/common_static/site
    if [ $? -ne 0 ]; then
        echo "FATAL: Site directories do not exist!"
        exit 1
    fi
    for site in `ls -1d *`;
    do
        for comp in fog safeseas snow
        do
            echo "INFO: Removing files from /awips2/edex/data/utility/common_static/site/$site/$comp/monitoringArea"
            if [ -d /awips2/edex/data/utility/common_static/site/${site}/${comp}/monitoringArea ]; then
                cd /awips2/edex/data/utility/common_static/site/${site}/${comp}/monitoringArea
                if [ $? -ne 0 ]; then
                    echo "FATAL: Could not change directory to site/$site/$comp/monitoringArea directory"
                else
                    rm /awips2/edex/data/utility/common_static/site/${site}/${comp}/monitoringArea/*
                    if [ $? -ne 0 ]; then
                        echo "FATAL: Could not remove files from /awips2/edex/data/utility/common_static/site/$site/$comp/monitoringArea"
                    fi
                fi
            fi
        done
    done
    echo "INFO: This update was successful."
fi

exit 0

@@ -1,22 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;
CREATE SCHEMA events AUTHORIZATION awips;

@@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="createEventsSchema.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - adding the events schema to the metadata database"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0

@@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="increaseAggregateRecordGroupingLength.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - increasing the size of the aggregate.grouping column"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0

@@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="convertAggregateRecordGroupToXml.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - converting the aggregate.grouping column to xml"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0

@@ -1,38 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- Temporarily replace dashes in pluginName rows with @@@
update events.aggregate set grouping = regexp_replace(grouping, 'pluginName:(.*?)-(.*)', E'pluginName:\\1@@@\\2', 'g');

-- Convert to XML format
update events.aggregate set grouping = regexp_replace(grouping, ':', '" value="', 'g');
update events.aggregate set grouping = regexp_replace(grouping, '-', '"/><group name="', 'g');
update events.aggregate set grouping = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?><stat><group name="' || grouping || '"/></stat>';

-- Restore dashes from @@@
update events.aggregate set grouping = regexp_replace(grouping, '<group name="(.*?)" value="(.*?)@@@(.*?)"', E'<group name="\\1" value="\\2-\\3"', 'g');

-- Commit the transaction
END;

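As a sketch of the combined effect of the statements above, a hypothetical grouping value whose plugin name contains a dash would be rewritten as follows (the @@@ placeholder carries the dash through the intermediate steps):

    -- before: pluginName:Canadian-NH
    -- after:  <?xml version="1.0" encoding="UTF-8" standalone="yes"?><stat><group name="pluginName" value="Canadian-NH"/></stat>
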
@@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="fixParameterUnits.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - fix parameter units degree definitions"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0

@@ -1,30 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- update 'Degree true' and 'deg true' to 'deg'
update metadata.parameter set unit = 'deg' where unit like 'Deg%true' or unit like 'deg%true';

-- Commit the transaction
END;

@@ -1,22 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;
ALTER TABLE events.aggregate ALTER COLUMN grouping TYPE varchar(1024);

@@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="addRouteToSubscriptionSlots.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started"

# run the update
/awips2/psql/bin/psql -U awips -d ebxml -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0

@@ -1,72 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect ebxml;

-- Create a function that will load plpgsql
CREATE OR REPLACE FUNCTION make_plpgsql()
RETURNS VOID
LANGUAGE SQL
AS $$
CREATE LANGUAGE plpgsql;
$$;

-- Load plpgsql if it is not already loaded
SELECT
    CASE
    WHEN EXISTS(
        SELECT 1
        FROM pg_catalog.pg_language
        WHERE lanname='plpgsql'
    )
    THEN NULL
    ELSE make_plpgsql() END;

-- The function to add a route slot to subscriptions
CREATE OR REPLACE FUNCTION addRouteSlot() RETURNS void AS $$
DECLARE
    registryobject_record RECORD;
    value_key INTEGER;
    slot_key INTEGER;
    registryobjectslot_key INTEGER;

BEGIN
    -- Find all subscription registry objects
    FOR registryobject_record IN SELECT id from registryobject where objecttype like '%Subscription' LOOP
        -- Create the value for the slot
        SELECT INTO value_key nextval('hibernate_sequence');
        INSERT INTO value (dtype, key, stringvalue) VALUES ('StringValueType', value_key, 'OPSNET');
        -- Create the slot entry itself
        SELECT INTO slot_key nextval('hibernate_sequence');
        INSERT INTO slot (key, name, slotvalue_key) VALUES (slot_key, 'route', value_key);
        -- Create the registryobject_slot entry
        SELECT INTO registryobjectslot_key nextval('hibernate_sequence');
        INSERT INTO registryobject_slot (registryobject_id, child_slot_key) VALUES (registryobject_record.id, slot_key);
    END LOOP;

END;
$$ LANGUAGE plpgsql;

-- Add the route slots to subscriptions
SELECT addRouteSlot();

-- Drop functions
DROP FUNCTION make_plpgsql();
DROP FUNCTION addRouteSlot();

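As a sketch (the check itself is not part of the script), the new slots could be spot-checked by joining the same tables the function writes to:

    -- verify each subscription now carries a 'route' slot valued 'OPSNET'
    SELECT ros.registryobject_id, s.name, v.stringvalue
    FROM registryobject_slot ros
    JOIN slot s ON s.key = ros.child_slot_key
    JOIN value v ON v.key = s.slotvalue_key
    WHERE s.name = 'route';
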
@@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="convertPluginNameToDataType.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0

@@ -1,30 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- Replace pluginName with dataType
update events.aggregate set grouping = regexp_replace(grouping, 'pluginName', 'dataType', 'g') where eventtype = 'com.raytheon.uf.common.stats.ProcessEvent' ;

-- Commit the transaction
END;

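As a sketch, for a hypothetical ProcessEvent row the replacement above changes only the group name:

    -- before: <group name="pluginName" value="grib"/>
    -- after:  <group name="dataType" value="grib"/>
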
@@ -1,3 +0,0 @@
ALTER TABLE binlightning ADD COLUMN lightsource character varying(5);
ALTER TABLE binlightning ALTER COLUMN lightsource SET STORAGE EXTENDED;
ALTER TABLE binlightning ALTER COLUMN lightsource SET DEFAULT 'NLDN'::character varying;

@ -1,81 +0,0 @@
#!/bin/bash
# DR #1846 - this DR will remove all reftimeindex indices from the metadata database.

PSQL="/awips2/psql/bin/psql"

RETRIEVE_INDEX_SQL="SELECT indexname FROM pg_indexes WHERE indexname LIKE '%reftimeindex%' ORDER BY indexname;"
_index_list_txt=indexlist.txt

echo "INFO: update started"

# retrieve the reftime indices
${PSQL} -U awips -d metadata -c "${RETRIEVE_INDEX_SQL}" -t -o ${_index_list_txt}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to retrieve the list of reftime indices."
    echo "FATAL: The update has failed."
    exit 1
fi

for index in `cat ${_index_list_txt}`;
do
    # determine which table the index is in.
    SQL="SELECT tablename FROM pg_indexes WHERE indexname = '${index}';"
    table=`${PSQL} -U awips -d metadata -c "${SQL}" -t`
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to determine which table ${index} belongs to."
        echo "FATAL: The update has failed."
        exit 1
    fi

    # remove the index
    ${PSQL} -U awips -d metadata -c "DROP INDEX ${index};"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to drop index - ${index}."
        echo "FATAL: The update has failed."
        exit 1
    fi

    # create the new index
    SQL="CREATE INDEX ${index} ON ${table} USING btree(reftime, forecasttime);"
    ${PSQL} -U awips -d metadata -c "${SQL}"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to create index ${index} on table ${table}."
        echo "FATAL: The update has failed."
        exit 1
    fi
done

rm -f ${_index_list_txt}

RETRIEVE_INDEX_SQL="SELECT indexname FROM pg_indexes WHERE indexname LIKE '%fcsttimeindex%' ORDER BY indexname;"

# retrieve the fcsttime indices
${PSQL} -U awips -d metadata -c "${RETRIEVE_INDEX_SQL}" -t -o ${_index_list_txt}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to retrieve the list of fcsttime indices."
    echo "FATAL: The update has failed."
    exit 1
fi

for index in `cat ${_index_list_txt}`;
do
    # remove the index
    ${PSQL} -U awips -d metadata -c "DROP INDEX ${index};"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to drop index - ${index}."
        echo "FATAL: The update has failed."
        exit 1
    fi
done

rm -f ${_index_list_txt}

echo "INFO: the update has completed successfully!"

exit 0
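To confirm the rebuild afterwards, the same pg_indexes view the script queries can be checked by hand; a minimal sketch:

/awips2/psql/bin/psql -U awips -d metadata -c "SELECT indexname, tablename FROM pg_indexes WHERE indexname LIKE '%reftimeindex%' ORDER BY indexname;"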
@ -1,33 +0,0 @@
#!/bin/bash
# DR #1846 - this update script will remove all datauri_idx indices from the metadata database

PSQL="/awips2/psql/bin/psql"
RETRIEVE_INDEX_SQL="SELECT indexname FROM pg_indexes WHERE indexname LIKE '%datauri_idx%' ORDER BY indexname;"
_index_list_txt=indexlist.txt

echo "INFO: update started"

# retrieve the indices
${PSQL} -U awips -d metadata -c "${RETRIEVE_INDEX_SQL}" -t -o ${_index_list_txt}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to retrieve the list of data uri indices."
    echo "FATAL: The update has failed."
    exit 1
fi

for index in `cat ${_index_list_txt}`;
do
    # remove the index
    ${PSQL} -U awips -d metadata -c "DROP INDEX ${index};"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to drop index - ${index}."
        echo "FATAL: The update has failed."
        exit 1
    fi
done

rm -f ${_index_list_txt}

echo "INFO: the update has completed successfully!"

exit 0
@ -1,53 +0,0 @@
-- called by updateWarningTables.sh to alter the warning and practicewarning tables
-- and to create indexes and sequences for the activetable tables
DROP SEQUENCE IF EXISTS practice_activetableseq;
DROP SEQUENCE IF EXISTS activetableseq;
DROP INDEX IF EXISTS activetable_officeid_phensig_idx;
DROP INDEX IF EXISTS practice_activetable_officeid_phensig_idx;
DROP INDEX IF EXISTS practicewarning_office_phensig_index;
DROP INDEX IF EXISTS warning_office_phensig_index;
ALTER TABLE warning DROP COLUMN IF EXISTS ugczones;
ALTER TABLE practicewarning DROP COLUMN IF EXISTS ugczones;

CREATE INDEX activetable_officeid_phensig_idx
    ON activetable
    USING btree
    (officeid COLLATE pg_catalog."default", phensig COLLATE pg_catalog."default");

CREATE INDEX practice_activetable_officeid_phensig_idx
    ON practice_activetable
    USING btree
    (officeid COLLATE pg_catalog."default", phensig COLLATE pg_catalog."default");

CREATE SEQUENCE activetableseq
    INCREMENT 1
    MINVALUE 1
    MAXVALUE 9223372036854775807
    START 1
    CACHE 1;
ALTER TABLE activetableseq
    OWNER TO awips;

CREATE SEQUENCE practice_activetableseq
    INCREMENT 1
    MINVALUE 1
    MAXVALUE 9223372036854775807
    START 1
    CACHE 1;
ALTER TABLE practice_activetableseq
    OWNER TO awips;

CREATE INDEX practicewarning_office_phensig_index
    ON practicewarning
    USING btree
    (officeid COLLATE pg_catalog."default", phensig COLLATE pg_catalog."default");

CREATE INDEX warning_office_phensig_index
    ON warning
    USING btree
    (officeid COLLATE pg_catalog."default", phensig COLLATE pg_catalog."default");

ALTER TABLE warning ADD COLUMN ugczones text;
ALTER TABLE practicewarning ADD COLUMN ugczones text;
@ -1,84 +0,0 @@
-- called by normalizeGfe.sh to create new GFE tables
DROP TABLE IF EXISTS gfe_locks CASCADE;
DROP TABLE IF EXISTS gfe_parmid CASCADE;
DROP TABLE IF EXISTS gfe_dbid CASCADE;
DROP SEQUENCE IF EXISTS gfe_lock_seq;
DROP SEQUENCE IF EXISTS gfe_parmid_seq;
DROP SEQUENCE IF EXISTS gfe_dbid_seq;
DROP SEQUENCE IF EXISTS gfe_history_seq;

CREATE TABLE gfe_dbid
(
    id integer NOT NULL,
    dbtype character varying(15),
    format character varying(255) NOT NULL,
    modelname character varying(64) NOT NULL,
    modeltime character varying(13) NOT NULL,
    siteid character varying(4) NOT NULL,
    CONSTRAINT gfe_dbid_pkey PRIMARY KEY (id),
    CONSTRAINT gfe_dbid_siteid_modelname_modeltime_dbtype_key UNIQUE (siteid, modelname, modeltime, dbtype)
)
WITH (
    OIDS=FALSE
);

ALTER TABLE gfe_dbid
    OWNER TO awips;

CREATE TABLE gfe_parmid
(
    id integer NOT NULL,
    parmlevel character varying(8),
    parmname character varying(100),
    dbid_id integer NOT NULL,
    CONSTRAINT gfe_parmid_pkey PRIMARY KEY (id),
    CONSTRAINT fkbec2950012156549 FOREIGN KEY (dbid_id)
        REFERENCES gfe_dbid (id) MATCH SIMPLE
        ON UPDATE NO ACTION ON DELETE CASCADE,
    CONSTRAINT gfe_parmid_dbid_id_parmname_parmlevel_key UNIQUE (dbid_id, parmname, parmlevel)
)
WITH (
    OIDS=FALSE
);
ALTER TABLE gfe_parmid
    OWNER TO awips;

CREATE TABLE gfe_locks
(
    id integer NOT NULL,
    endtime timestamp without time zone NOT NULL,
    starttime timestamp without time zone NOT NULL,
    wsid character varying(255) NOT NULL,
    parmid_id integer NOT NULL,
    CONSTRAINT gfe_locks_pkey PRIMARY KEY (id),
    CONSTRAINT fk92582e8f7bab05cc FOREIGN KEY (parmid_id)
        REFERENCES gfe_parmid (id) MATCH SIMPLE
        ON UPDATE NO ACTION ON DELETE CASCADE,
    CONSTRAINT gfe_locks_parmid_id_starttime_endtime_key UNIQUE (parmid_id, starttime, endtime)
)
WITH (
    OIDS=FALSE
);
ALTER TABLE gfe_locks
    OWNER TO awips;

CREATE SEQUENCE gfe_lock_seq
    INCREMENT 1
    MINVALUE 1
    MAXVALUE 9223372036854775807
    START 1
    CACHE 1;
ALTER TABLE gfe_lock_seq
    OWNER TO awips;

CREATE SEQUENCE gfe_history_seq
    INCREMENT 1
    MINVALUE 1
    MAXVALUE 9223372036854775807
    START 1
    CACHE 1;
ALTER TABLE gfe_history_seq
    OWNER TO awips;

ALTER TABLE gfe ADD COLUMN parmId_id integer;
@ -1,28 +0,0 @@
#!/bin/bash
# DR #1857 - this update script will create sequences for the metadata database.
# No arguments are passed to this script. It reads sequences.txt from the local directory.

PSQL="/awips2/psql/bin/psql"
_sequences_txt=sequences.txt

echo "INFO: Creating sequences."

for sequence in `cat ${_sequences_txt}`;
do
    table=${sequence%%seq}
    echo "INFO: Creating sequence ${sequence} for table ${table}"
    ## To start the sequence with (current max id + 1), uncomment the next two lines.
    #sequenceStart=`${PSQL} -tU awips -d metadata -c "SELECT max(id) FROM ${table};" | tr -d '\n' | tr -d ' '`
    #let sequenceStart=${sequenceStart}+1
    ${PSQL} -U awips -d metadata -c "CREATE SEQUENCE ${sequence} INCREMENT 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 START ${sequenceStart:=1};"
    ${PSQL} -U awips -d metadata -c "ALTER TABLE ${sequence} OWNER TO awips;"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to create sequence ${sequence}."
        echo "FATAL: The update has failed."
        exit 1
    fi
done

echo "INFO: sequence creation has completed successfully!"

exit 0
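The script expects one sequence name per line in sequences.txt; a minimal sketch of a run, with the wrapper script name assumed and two entries taken from the sequence list later in this changeset:

cat > sequences.txt << 'EOF'
warningseq
radarseq
EOF
./createSequences.sh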
@ -1,17 +0,0 @@
#!/bin/bash
# DR #1926 - this update script will create a scan index

PSQL="/awips2/psql/bin/psql"

echo "INFO: Creating scan_icao_type_idx"
${PSQL} -U awips -d metadata -c "CREATE INDEX scan_icao_type_idx ON scan USING btree (icao COLLATE pg_catalog.\"default\", type COLLATE pg_catalog.\"default\");"

if [ $? -ne 0 ]; then
    echo "ERROR: Failed to create index."
    echo "FATAL: The update has failed."
    exit 1
fi

echo "INFO: Index created successfully!"

exit 0
@ -1,20 +0,0 @@
#!/awips2/python/bin/python

import re
import sys

# we expect the filename as a command-line argument.
hdf5file = sys.argv[1]

matches = re.search(r'modelsounding-([0-9]+-[0-9]+-[0-9]+)-([0-9]+)\.h5', hdf5file, re.M|re.I)
if matches:
    # extract the date and the hour
    date = matches.group(1)
    hour = matches.group(2)

    reftimeDirName = date + "_" + hour + ":00:00.0"
    print reftimeDirName
else:
    print "ERROR: unrecognized file - " + hdf5file + "!"
    sys.exit(-1)
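For illustration, a hypothetical file modelsounding-2013-05-02-12.h5 matches the pattern above with date 2013-05-02 and hour 12:

/awips2/python/bin/python determineRefTimeDirectory.py modelsounding-2013-05-02-12.h5
# prints: 2013-05-02_12:00:00.0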
@ -1,20 +0,0 @@
#!/awips2/python/bin/python

import re
import sys

# we expect the filename, model name, and forecast hour as arguments
hdf5file = sys.argv[1]
model = sys.argv[2]
forecastHour = sys.argv[3]

matches = re.search(r'modelsounding-([0-9]+-[0-9]+-[0-9]+-[0-9]+)\.h5', hdf5file, re.M|re.I)
if matches:
    # extract the reftime
    reftime = matches.group(1)

    newFileName = "modelsounding-" + model + "-" + reftime + "-FH-" + str(forecastHour) + ".h5"
    print newFileName
else:
    print "ERROR: unrecognized file - " + hdf5file + "!"
    sys.exit(-1)
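Likewise for this helper, with a hypothetical model name and forecast hour:

/awips2/python/bin/python modelsoundingFileName.py modelsounding-2013-05-02-12.h5 ETA 6
# prints: modelsounding-ETA-2013-05-02-12-FH-6.h5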
@ -1,104 +0,0 @@
#!/bin/bash
# Main script for updating GFE database structure

PSQL="/awips2/psql/bin/psql"
PYTHON="/awips2/python/bin/python"

SQL_SCRIPT="createNewGfeTables.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "Creating new GFE tables"
${PSQL} -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo
echo "Querying GFE parmIds"
RETRIEVE_PARMIDS_SQL="SELECT distinct parmId FROM gfe order by parmId"
_parmid_list_txt=parmIdList.txt

${PSQL} -U awips -d metadata -c "${RETRIEVE_PARMIDS_SQL}" -t -o ${_parmid_list_txt}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to retrieve the list of parm ids."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Parsing parmIds for insertion into new tables"
PYTHON_PARSE_SCRIPT="parseParmIds.py"
if [ ! -f ${PYTHON_PARSE_SCRIPT} ]; then
    echo "ERROR: the required python script - ${PYTHON_PARSE_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi
${PYTHON} ${PYTHON_PARSE_SCRIPT} ${_parmid_list_txt}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to parse parm ids."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Inserting db ids"
# dbIdInserts.sql generated from parseParmIds.py
${PSQL} -U awips -d metadata -q -f dbIdInserts.sql
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to insert database ids."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Inserting parm ids"
# parmIdInserts.sql generated from parseParmIds.py
${PSQL} -U awips -d metadata -q -f parmIdInserts.sql
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to insert parm ids."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Add gfe record reference to parm id table"
# gfeToParmIdUpdates.sql generated from parseParmIds.py
${PSQL} -U awips -d metadata -q -f gfeToParmIdUpdates.sql
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to add gfe to parm id mapping."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Updating constraints and indexes on gfe"
SQL_SCRIPT="updateGfeConstraintsAndIndexes.sql"
${PSQL} -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to update constraints and indexes."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Updating dataURIs for gfe"
UPDATE_DATAURIS_SQL="UPDATE gfe SET dataURI =regexp_replace(dataURI, '(/gfe/[^/]+)/([^_]+)_([^:]+):([^_]+)_GRID_([^_]*)_([^_]+)_(\\d{8}_\\d{4})/[^/]+', '\\1/\\4/\\6/\\7/\\5/\\2/\\3') where dataURI ~ '/gfe/[^/]+/[^/]+/[^/]+';"
${PSQL} -U awips -d metadata -c "${UPDATE_DATAURIS_SQL}"
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to update the gfe dataURIs."
    echo "FATAL: The update has failed."
    exit 1
fi

echo
echo "Running full vacuum for gfe"
${PSQL} -U awips -d metadata -c "VACUUM FULL VERBOSE ANALYZE gfe"
@ -1,53 +0,0 @@
# Called by normalizeGfe.sh to parse the distinct parmIds into table inserts
import sys
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID

fileName = sys.argv[1]
f = open(fileName, 'r')
dbIdInsertFile = open('dbIdInserts.sql', 'w')
parmIdInsertFile = open('parmIdInserts.sql', 'w')
recordUpdateFile = open('gfeToParmIdUpdates.sql', 'w')

dbIds = {}
parmIds = {}
dbIdCounter = 1
parmIdCounter = 1

for parmIdString in f:
    # strip the new line
    parmIdString = parmIdString.strip()

    # skip the last line of the file, which is empty
    if len(parmIdString) > 0:
        if not parmIds.has_key(parmIdString):
            parmIds[parmIdString] = parmIdCounter
            parmId = ParmID(parmIdString)
            dbId = parmId.getDbId()
            dbIdString = dbId.getModelId()

            if not dbIds.has_key(dbIdString):
                dbIds[dbIdString] = dbIdCounter
                dbIdInsertFile.write("INSERT INTO gfe_dbid (id, dbtype, format, modelname, modeltime, siteid) VALUES (" +
                    str(dbIdCounter) + ", '" + dbId.getDbType() + "', '" + dbId.getFormat() + "', '" +
                    dbId.getModelName() + "', '" + dbId.getModelTime() + "', '" + dbId.getSiteId() + "');\n")
                dbIdCounter += 1
            dbIdVal = dbIds[dbIdString]
            parmIdInsertFile.write("INSERT INTO gfe_parmid (id, parmlevel, parmname, dbid_id) VALUES (" +
                str(parmIdCounter) + ", '" + parmId.getParmLevel() + "', '" +
                parmId.getParmName() + "', " + str(dbIdVal) + ");\n")
            recordUpdateFile.write("UPDATE gfe set parmId_id = " + str(parmIdCounter) +
                " WHERE parmId = '" + parmIdString + "';\n")
            parmIdCounter += 1
        else:
            # should never happen if the query feeding this is using distinct
            print "Received duplicate parmId: " + parmIdString

dbIdInsertFile.write("CREATE SEQUENCE gfe_dbid_seq INCREMENT 1 MINVALUE 1 MAXVALUE 9223372036854775807 START "
    + str((dbIdCounter / 50) + 1) + " CACHE 1;\nALTER TABLE gfe_dbid_seq OWNER TO awips;")
parmIdInsertFile.write("CREATE SEQUENCE gfe_parmid_seq INCREMENT 1 MINVALUE 1 MAXVALUE 9223372036854775807 START "
    + str((parmIdCounter / 50) + 1) + " CACHE 1;\nALTER TABLE gfe_parmid_seq OWNER TO awips;")
f.close()
dbIdInsertFile.close()
parmIdInsertFile.close()
recordUpdateFile.close()
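A minimal sketch of the invocation, matching how normalizeGfe.sh drives it:

/awips2/python/bin/python parseParmIds.py parmIdList.txt
# writes dbIdInserts.sql, parmIdInserts.sql, and gfeToParmIdUpdates.sql to the current directory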
@ -1,37 +0,0 @@
# Called by updateWarningTables.sh to parse the ugc zones into table updates
import sys

table = sys.argv[1]
fileName = sys.argv[2]
f = open(fileName, 'r')
ugcZonesUpdateFile = open(table + 'UgcZonesUpdates.sql', 'w')

prevParentId = None
parentId = None
ugcZones = None

for line in f:
    # break the line into columns
    columns = line.split('|')

    if len(columns) > 1:
        parentId = columns[0].strip()
        ugcZone = columns[1].strip()

        if parentId == prevParentId:
            ugcZones.append(ugcZone)
        else:
            if ugcZones is not None:
                zoneStr = ", ".join(ugcZones)
                ugcZonesUpdateFile.write("UPDATE " + table + " SET ugczones = '" + zoneStr + "' WHERE id = " + prevParentId + ";\n")
            ugcZones = [ugcZone]
            prevParentId = parentId

# flush the final group
if ugcZones is not None:
    zoneStr = ", ".join(ugcZones)
    ugcZonesUpdateFile.write("UPDATE " + table + " SET ugczones = '" + zoneStr + "' WHERE id = " + prevParentId + ";\n")

f.close()
ugcZonesUpdateFile.close()
@ -1,35 +0,0 @@
#!/bin/bash
# DR #1846 - this update script will remove all hdffileid columns from the metadata database

PSQL="/awips2/psql/bin/psql"
SQL="SELECT table_name FROM information_schema.columns WHERE column_name = 'hdffileid';"
_table_list_txt=tablelist.txt

echo "INFO: update started."

# retrieve the tables
${PSQL} -U awips -d metadata -c "${SQL}" -t -o ${_table_list_txt}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to retrieve the list of tables."
    echo "FATAL: The update has failed."
    exit 1
fi

for table in `cat ${_table_list_txt}`;
do
    ${PSQL} -U awips -d metadata -c "ALTER TABLE ${table} DROP COLUMN hdffileid CASCADE;"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to drop column hdffileid in table ${table}."
        echo "FATAL: The update has failed."
        exit 1
    fi
done

rm -f ${_table_list_txt}

echo "INFO: the update has completed successfully!"

exit 0
@ -1,11 +0,0 @@
#!/bin/bash

# DR #1917 - removes the old stat aggregate format/layout
echo "Removing old stat aggregates"
rm -rf /awips2/edex/data/utility/common_static/site/*/stats/aggregates

# run a full vacuum on the stats table; this keeps the table more stable
PSQL="/awips2/psql/bin/psql"
echo "Running full vacuum on stats"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE events.stats;"
@ -1,34 +0,0 @@
#!/bin/bash
# This script will drop the ntrans table and remove the ntrans hdf5 files.
#
# This update needs to be performed when going from build 13.3.1 to build 13.4.1 (or 13.5.1).
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="DROP TABLE IF EXISTS ntrans; UPDATE plugin_info SET initialized='false' WHERE name='ntrans';"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

echo "INFO: Dropping the metadata ntrans table."
${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: Purging ntrans hdf5 files."
if [ -d /awips2/edex/data/hdf5/ntrans ]; then
    rm -rfv /awips2/edex/data/hdf5/ntrans
fi

echo "INFO: The update was successfully applied."

exit 0
@ -1,89 +0,0 @@
ffgseq
convsigmetseq
atcfseq
nonconvsigmetseq
nctextseq
sgwhseq
ncgribseq
mcidasseq
idftseq
sshaseq
ncscatseq
wcpseq
awwseq
stormtrackseq
ntransseq
ncpafmseq
intlsigmetseq
mosaicseq
ncscdseq
ncuairseq
ncccfpseq
ncairepseq
nctafseq
ncpirepseq
airmetseq
solarimageseq
tcmseq
sgwhvseq
tafseq
ldadmesonetseq
cwatseq
crimssseq
ffmpseq
fogseq
airepseq
reccoseq
bufruaseq
cwaseq
bufrmthdwseq
svrwxseq
bufrascatseq
redbookseq
qpfseq
radarseq
ccfpseq
satelliteseq
scanseq
bufrhdwseq
pirepseq
acarsseq
bufrsigwxseq
lsrseq
gridseq
qcseq
ldadhydroseq
modelsoundingseq
profilerseq
binlightningseq
ldadprofilerseq
vilseq
sfcobsseq
tcgseq
fssobsseq
poessoundingseq
nucapsseq
goessoundingseq
bufrquikscatseq
gribseq
viirsseq
acarssoundingseq
preciprateseq
bufrncwfseq
gfeseq
tcsseq
practicewarningseq
warningseq
obsseq
bufrssmiseq
bufrmosHpcseq
bufrmosMrfseq
bufrmosEtaseq
bufrmosNgmseq
bufrmosseq
bufrmosAvnseq
bufrmosGfsseq
bufrmosLampseq
mesowestseq
ldad_manualseq
vaaseq
@ -1,36 +0,0 @@
-- Called by normalizeGfe.sh to drop and add constraints

ALTER TABLE gfe DROP CONSTRAINT gfe_datauri_key;

DROP INDEX IF EXISTS gfeParmTime_idx;

DROP INDEX IF EXISTS gfedatauri_idx;

DROP INDEX IF EXISTS gfefcsttimeindex;

ALTER TABLE gfe DROP COLUMN IF EXISTS parmname;

ALTER TABLE gfe DROP COLUMN IF EXISTS parmlevel;

ALTER TABLE gfe DROP COLUMN IF EXISTS dbid;

ALTER TABLE gfe DROP COLUMN IF EXISTS parmid;

ALTER TABLE gfe ADD CONSTRAINT fk18f667bab05cc FOREIGN KEY (parmid_id)
    REFERENCES gfe_parmid (id) MATCH SIMPLE
    ON UPDATE NO ACTION ON DELETE CASCADE;

ALTER TABLE gfe ADD CONSTRAINT gfe_parmid_id_rangestart_rangeend_reftime_forecasttime_key
    UNIQUE (parmid_id, rangestart, rangeend, reftime, forecasttime);

ALTER TABLE gfe_gridhistory DROP CONSTRAINT fk66434335e416514f;

ALTER TABLE gfe_gridhistory RENAME COLUMN key TO id;

ALTER TABLE gfe_gridhistory RENAME COLUMN parent to parent_id;

ALTER TABLE gfe_gridhistory ADD CONSTRAINT fk664343359ad1f975 FOREIGN KEY (parent_id)
    REFERENCES gfe (id) MATCH SIMPLE
    ON UPDATE NO ACTION ON DELETE CASCADE;

DROP TABLE IF EXISTS gfelocktable;
@ -1,130 +0,0 @@
#!/bin/bash

# DR #1846 - this update script will re-arrange the existing modelsounding hdf5 files to divide them by
# model name and site. Currently, every file will be copied to every potential path that it could be
# accessed at. But, any new files that are written after the upgrade is complete, will contain only
# the minimum amount of required data.

# ensure that we actually have modelsounding data to re-arrange
DATA_DIRECTORY="/awips2/edex/data/hdf5/modelsounding"

if [ ! -d ${DATA_DIRECTORY} ]; then
    echo "INFO: No Model Sounding Data Was Found On The System!"
    echo "INFO: Update Terminated ..."
    exit 0
fi

# determine where we are
path_to_script=`readlink -f $0`
dir=$(dirname $path_to_script)

# first, retrieve all possible models
PSQL="/awips2/psql/bin/psql"
SQL="SELECT DISTINCT reporttype FROM awips.modelsounding ORDER BY reporttype;"
_modelslist=modelslist.txt

echo "INFO: update started."
pushd . > /dev/null 2>&1
cd ${DATA_DIRECTORY}

# retrieve the models
${PSQL} -U awips -d metadata -c "${SQL}" -t -o ${_modelslist}
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to retrieve the list of models."
    echo "FATAL: The update has failed."
    exit 1
fi

PYTHON="/awips2/python/bin/python"
_python_script="${dir}/determineRefTimeDirectory.py"
_python_script2="${dir}/modelsoundingFileName.py"
_fcsthourslist=fcsthourslist.txt

# now loop through the models
for model in `cat ${_modelslist}`; do
    # create a directory for the model.
    mkdir -p ${DATA_DIRECTORY}/${model}
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to create directory - ${DATA_DIRECTORY}/${model}!"
        echo "FATAL: The update has failed."
        exit 1
    fi

    # retrieve the potential forecast hours for the model that we are
    # currently processing.
    SQL_FCST_HOUR="SELECT DISTINCT (fcstSeconds / 3600) AS forecastHour FROM modelsounding WHERE reporttype = '${model}' ORDER BY forecastHour;"
    ${PSQL} -U awips -d metadata -c "${SQL_FCST_HOUR}" -t -o ${_fcsthourslist}

    # loop through the hdf5 files
    for file in `ls -1 *.h5`; do
        # determine which reftime directory would be associated with the file
        reftimeDirectory=`${PYTHON} ${_python_script} "${file}"`
        if [ $? -ne 0 ]; then
            echo "FATAL: The update has failed."
            exit 1
        fi

        # create the reftime directory
        mkdir -p "${DATA_DIRECTORY}/${model}/${reftimeDirectory}"
        if [ $? -ne 0 ]; then
            echo "ERROR: Failed to create directory - ${DATA_DIRECTORY}/${model}/${reftimeDirectory}!"
            echo "FATAL: The update has failed."
            exit 1
        fi

        # loop through the possible forecast hours
        for fcstHour in `cat ${_fcsthourslist}`; do
            # determine the new name of the file
            destinationFile=`${PYTHON} ${_python_script2} "${file}" "${model}" ${fcstHour}`
            if [ $? -ne 0 ]; then
                echo "ERROR: Failed to determine the adjusted name of file - ${file}!"
                echo "FATAL: The update has failed."
                exit 1
            fi

            # create a link between the files
            ln ${file} ${DATA_DIRECTORY}/${model}/${reftimeDirectory}/${destinationFile}
            if [ $? -ne 0 ]; then
                echo "ERROR: Failed to create a link for ${file} to ${DATA_DIRECTORY}/${model}/${reftimeDirectory}/${destinationFile}!"
                echo "FATAL: The update has failed."
                exit 1
            fi
        done
    done

    rm -f ${_fcsthourslist}
    if [ $? -ne 0 ]; then
        echo "WARNING: Failed to remove temporary file - ${_fcsthourslist}."
    fi
done

# remove the models list text file
rm -f ${_modelslist}
if [ $? -ne 0 ]; then
    echo "WARNING: Failed to remove temporary file - ${_modelslist}."
fi

# remove the hdf5 files
rm -f *.h5
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to remove the obsolete hdf5 files!"
    echo "WARNING: Removing the files manually is recommended."
fi

popd > /dev/null 2>&1

echo "INFO: the update has completed successfully!"

exit 0
@ -1,75 +0,0 @@
#!/bin/bash
# Main script for updating warning database structure

PSQL="/awips2/psql/bin/psql"
PYTHON="/awips2/python/bin/python"

SQL_SCRIPT="alterWarningTables.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "Adding ugczones column to warning tables"
${PSQL} -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

TABLES="practicewarning warning"
for table in $TABLES
do
    echo
    echo "Querying for $table ugc zones"
    RETRIEVE_UGC_ZONES_SQL="SELECT parentwarning, zone FROM warning_ugczone where parentwarning in (select id from $table) order by parentwarning, key"
    _ugc_zone_txt="${table}UgcZones.txt"

    ${PSQL} -U awips -d metadata -c "${RETRIEVE_UGC_ZONES_SQL}" -t -o ${_ugc_zone_txt}
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to retrieve the ugc zones for $table table."
        echo "FATAL: The update has failed."
        exit 1
    fi

    echo
    echo "Parsing ugc zones for insertion into $table table"
    PYTHON_PARSE_SCRIPT="parseUgcZones.py"
    if [ ! -f ${PYTHON_PARSE_SCRIPT} ]; then
        echo "ERROR: the required python script - ${PYTHON_PARSE_SCRIPT} was not found."
        echo "FATAL: the update has failed!"
        exit 1
    fi

    ${PYTHON} ${PYTHON_PARSE_SCRIPT} ${table} ${_ugc_zone_txt}
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to parse ugc zones."
        echo "FATAL: The update has failed."
        exit 1
    fi

    echo
    echo "Adding ugc zones to $table table"
    # ${table}UgcZonesUpdates.sql generated from parseUgcZones.py
    ${PSQL} -U awips -d metadata -q -f ${table}UgcZonesUpdates.sql
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to add ugc zones."
        echo "FATAL: The update has failed."
        exit 1
    fi
done

# remove warning_ugczone
echo
echo "Dropping warning_ugczone table"
DROP_TABLE_SQL="DROP TABLE warning_ugczone"

${PSQL} -U awips -d metadata -c "${DROP_TABLE_SQL}"

echo
echo "Running full vacuum for warning"
${PSQL} -U awips -d metadata -c "VACUUM FULL VERBOSE ANALYZE warning"
@ -1,60 +0,0 @@
#!/bin/bash
# This script will update any gribParamInfo files to use the gridParamInfo tag
# and move gribModels files from common_static to edex_static and remove
# deprecated fields.
#
# This update is optional with 13.4.1 but it must be performed before grib can
# move to the future.
#
# This update is only for edex servers which host the cave localization files

echo "INFO: Moving all parameterInfo files to grid."

IFS=$'\n'
gribFiles=`find /awips2/edex/data/utility/edex_static/*/*/grib/parameterInfo/ -iname '*.xml'`

for f in $gribFiles; do
    newf=${f//grib/grid}
    if [ -e "$newf" ]; then
        echo Cannot upgrade $f because $newf already exists
    else
        mkdir -p `dirname $newf`
        mv $f $newf
    fi
done

echo "INFO: Upgrading all parameterInfo files."

gridFiles=`find /awips2/edex/data/utility/edex_static/*/*/grid/parameterInfo/ -iname '*.xml'`

for f in $gridFiles; do
    sed -n 's/gribParamInfo/gridParamInfo/g;p;' -i $f
done

echo "INFO: Moving all gribModels to edex_static."

commonFiles=`find /awips2/edex/data/utility/common_static/*/*/grib/models/ -iname '*.xml'`

for f in $commonFiles; do
    newf=${f//common_static/edex_static}
    if [ -e "$newf" ]; then
        echo Cannot upgrade $f because $newf already exists
    else
        mkdir -p `dirname $newf`
        mv $f $newf
    fi
done

echo "INFO: Cleaning all gribModel files."

edexFiles=`find /awips2/edex/data/utility/edex_static/*/*/grib/models/ -iname '*.xml'`

for f in $edexFiles; do
    sed '/^\s*<title>.*<\/title>\s*$/d' -i $f
    sed '/^\s*<alias>.*<\/alias>\s*$/d' -i $f
    sed '/^\s*<dt>.*<\/dt>\s*$/d' -i $f
    sed '/^\s*<paramInfo>.*<\/paramInfo>\s*$/d' -i $f
done

echo "INFO: The update finished successfully."
exit 0
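The ${f//grib/grid} expansion used above replaces every occurrence of grib in a path; a minimal sketch with a hypothetical file name:

f=/awips2/edex/data/utility/edex_static/base/grib/parameterInfo/gfs.xml
echo ${f//grib/grid}
# /awips2/edex/data/utility/edex_static/base/grid/parameterInfo/gfs.xml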
@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="changeDataTypeEnumToUppercase.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - changing DataType to an Uppercase enum"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0
@ -1,32 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- Update dataSetType="Grid" references to dataSetType="GRID"
update ebxml.value set stringvalue = regexp_replace(stringvalue, 'dataSetType="Grid"', 'dataSetType="GRID"', 'g');
-- Update dataType="Grid" references to dataType="GRID"
update ebxml.value set stringvalue = regexp_replace(stringvalue, 'dataType="Grid"', 'dataType="GRID"', 'g');

-- Commit the transaction
END;
@ -1,54 +0,0 @@
#!/bin/bash

SQL_SCRIPT="getSubscriptionSite.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - converting Subscription sites to a collection"

# run the query and extract the site from its output
OUTPUT=`/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}`
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi
SITE=`echo $OUTPUT | sed 's/.*{\(.*\)}.*/\1/g'`

# If we were able to find a subscription with a site, then this update will run.
# Otherwise, there are no subscriptions to convert.
if test "${SITE#*awips}" != "$SITE"
then
    echo "INFO: there are no subscriptions in the database, not applying update."
    exit 0
fi

SQL_SCRIPT="convertSubscriptionSitesToCollection.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

# Copy the sql file, replacing the @@SITE@@ token with the actual site
cat ${SQL_SCRIPT} | sed "s/@@SITE@@/${SITE}/g" > /tmp/${SQL_SCRIPT}

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f /tmp/${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

# Remove the temporary file
rm /tmp/${SQL_SCRIPT}

echo "INFO: the update has completed successfully!"

exit 0
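The @@SITE@@ token substitution amounts to a single sed pass; a minimal sketch with a hypothetical site id:

echo '<officeId>@@SITE@@</officeId>' | sed "s/@@SITE@@/OAX/g"
# <officeId>OAX</officeId>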
@ -1,36 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- Remove officeID attribute
update ebxml.value set stringvalue = regexp_replace(stringvalue, 'officeID=".*?"', '', 'g');

-- Add officeID element
update ebxml.value set stringvalue = regexp_replace(stringvalue, '(<pendingSiteSubscription.*?>)', E'\\1<officeId>@@SITE@@</officeId>', 'g');
update ebxml.value set stringvalue = regexp_replace(stringvalue, '(<initialPendingSiteSubscription.*?>)', E'\\1<officeId>@@SITE@@</officeId>', 'g');
update ebxml.value set stringvalue = regexp_replace(stringvalue, '(<siteSubscription.*?>)', E'\\1<officeId>@@SITE@@</officeId>', 'g');

-- Commit the transaction
END;
@ -1,68 +0,0 @@
#!/bin/bash
# DR #1869 - this update script will drop the dataURI column from all tables
# where it is no longer needed.

PSQL="/awips2/psql/bin/psql"

# takes one arg: a table name
# drops the datauri constraint and column if they exist
function dropDatauri {
    echo "INFO: Dropping DataURI column from $1"
    ${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS ${1}_datauri_key;"
    ${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP COLUMN IF EXISTS datauri;"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to drop dataURI column for $1"
        echo "FATAL: The update has failed."
        exit 1
    fi
}

# takes three args: table, constraint name, unique columns
# will first drop the constraint if it exists and then adds it back; this is
# fairly inefficient if it does exist, but operationally it won't exist, and for
# testing this allows the script to be run easily as a noop.
function dropDatauriAndAddConstraint {
    dropDatauri $1
    ${PSQL} -U awips -d metadata -c "ALTER TABLE $1 DROP CONSTRAINT IF EXISTS $2;"
    ${PSQL} -U awips -d metadata -c "ALTER TABLE $1 ADD CONSTRAINT $2 UNIQUE $3;"
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to add new unique constraint for $1"
        echo "FATAL: The update has failed."
        exit 1
    fi
    ${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE $1"
}

echo "INFO: Dropping dataURI columns."

# GFE already has the right constraints, so just drop the column and vacuum
dropDatauri gfe
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE gfe"

# Remap the constraints for these types
dropDatauriAndAddConstraint bufrmosavn bufrmosavn_location_id_reftime_forecasttime_key "(location_id, reftime, forecasttime)"
dropDatauriAndAddConstraint bufrmoshpc bufrmoshpc_location_id_reftime_forecasttime_key "(location_id, reftime, forecasttime)"
dropDatauriAndAddConstraint goessounding goessounding_stationid_reftime_latitude_longitude_key "(stationid, reftime, latitude, longitude)"
dropDatauriAndAddConstraint poessounding poessounding_stationid_reftime_latitude_longitude_key "(stationid, reftime, latitude, longitude)"
dropDatauriAndAddConstraint ldadmesonet ldadmesonet_stationid_reftime_reportType_dataProvider_latitude_longitude_key "(stationid, reftime, reportType, dataProvider, latitude, longitude)"
dropDatauriAndAddConstraint qc qc_stationid_reftime_qctype_latitude_longitude_key "(stationid, reftime, qcType, latitude, longitude)"

# These types need a unique stationid, so set one before dropping datauri.
dropDatauriAndAddConstraint bufrascat bufrascat_stationid_reftime_satid_latitude_longitude_key "(stationid, reftime, satid, latitude, longitude)"
${PSQL} -U awips -d metadata -c "update bufrascat set stationid = to_char(longitude, 'FM999.999') || ':' || to_char(latitude, 'FM999.999')"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE bufrascat"

dropDatauriAndAddConstraint bufrssmi bufrssmi_stationid_reftime_satid_latitude_longitude_key "(stationid, reftime, satid, latitude, longitude)"
${PSQL} -U awips -d metadata -c "update bufrssmi set stationid = to_char(longitude, 'FM999.999') || ':' || to_char(latitude, 'FM999.999')"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE bufrssmi"

dropDatauriAndAddConstraint bufrhdw bufrhdw_stationid_reftime_sattype_pressure_latitude_longitude_key "(stationid, reftime, sattype, pressure, latitude, longitude)"
${PSQL} -U awips -d metadata -c "update bufrhdw set stationid = to_char(longitude, 'FM999.999') || ':' || to_char(latitude, 'FM999.999')"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE bufrhdw"

dropDatauriAndAddConstraint bufrmthdw bufrmthdw_stationid_reftime_sattype_pressure_latitude_longitude_key "(stationid, reftime, sattype, pressure, latitude, longitude)"
${PSQL} -U awips -d metadata -c "update bufrmthdw set stationid = to_char(longitude, 'FM999.999') || ':' || to_char(latitude, 'FM999.999')"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE bufrmthdw"

echo "INFO: dataURI columns dropped successfully"
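The FM prefix in the to_char pattern above suppresses padding, so the synthesized stationid is a compact lon:lat pair; a quick check with hypothetical coordinates:

/awips2/psql/bin/psql -U awips -d metadata -c "SELECT to_char(-96.5, 'FM999.999') || ':' || to_char(41.25, 'FM999.999');"
# -96.5:41.25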
@ -1,127 +0,0 @@
#!/bin/bash

DUMP_FILE="/tmp/dump.sql"
SCHEMA_DEFINITION_SCRIPT="ebxmlSchemaDefinition.sql"
TABLE_NAME_UPDATE_SCRIPT="tableNameUpdate.sql"

# ensure that the schema definition script is present
if [ ! -f ${SCHEMA_DEFINITION_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SCHEMA_DEFINITION_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

# ensure that the table name update script is present
if [ ! -f ${TABLE_NAME_UPDATE_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${TABLE_NAME_UPDATE_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo -n "Modifying table names to conform to new schema..."
psql -U awips -d ebxml -f $TABLE_NAME_UPDATE_SCRIPT > /dev/null
echo "Done!"

echo -n "Dumping existing ebxml database contents..."
pg_dump --port 5432 --username "awips" --role "awips" --no-password --format plain --data-only --inserts --column-inserts --file $DUMP_FILE "ebxml"
if [ $? -ne 0 ]; then
    echo "FATAL: Failed to dump existing database contents!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

echo -n "Setting search path in dump file to be ebxml..."
sed -i 's/SET search_path =.*/SET search_path TO ebxml, pg_catalog;/g' $DUMP_FILE
if [ $? -ne 0 ]; then
    echo "FATAL: Failed to reset search path in dump file!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

echo -n "Removing references to versioninfo table..."
sed -i "s/INSERT INTO versioninfo/--INSERT INTO versioninfo/g" $DUMP_FILE
if [ $? -ne 0 ]; then
    echo "FATAL: Removing references to versioninfo table has failed!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

echo -n "Updating column names for version info columns..."
sed -i "s/versioninfo_versionname/versionname/g" $DUMP_FILE
if [ $? -ne 0 ]; then
    echo "FATAL: Updating version info column names has failed!"
    echo "FATAL: the update has failed!"
    exit 1
fi

sed -i "s/versioninfo_userversionname/userversionname/g" $DUMP_FILE
if [ $? -ne 0 ]; then
    echo "FATAL: Updating version info column names has failed!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

# Redirect standard out to null, but keep standard error for the following psql commands
echo -n "Adding tables to ebxml schema..."
psql -U awips -d metadata -f $SCHEMA_DEFINITION_SCRIPT > /dev/null
if [ $? -ne 0 ]; then
    echo "FATAL: Exporting new ebxml schema has failed!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

echo -n "Getting current hibernate_sequence value to update new sequences..."
sequenceValue=$(psql -t -U awips -d ebxml -c "SELECT nextval('hibernate_sequence');")
echo "Done!"

echo "Current hibernate_sequence value is: $sequenceValue"

for seq in 'action' 'deliveryinfo' 'emailaddress' 'internationalstring' 'localizedstring' 'map' 'objectreflist' 'parameter' 'postaladdress' 'queryexpression' 'registryobjectlist' 'simplelink' 'slot' 'telephonenumber' 'value'
do
    echo -n "Updating sequence ebxml.${seq}_sequence with value $sequenceValue to avoid key violations..."
    psql -U awips -d metadata -c "SELECT pg_catalog.setval('ebxml.${seq}_sequence', $sequenceValue, true);" > /dev/null
    if [ $? -ne 0 ]; then
        echo "FATAL: Updating sequence ${seq} has failed!"
        echo "FATAL: The update has failed!"
        exit 1
    fi
    echo "Done!"
done

echo -n "Removing references to hibernate_sequence..."
sed -i "s/SELECT pg_catalog.setval('hibernate_sequence'/--SELECT pg_catalog.setval('hibernate_sequence'/g" $DUMP_FILE
if [ $? -ne 0 ]; then
    echo "FATAL: Removal of references to hibernate_sequence has failed!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

echo -n "Populating ebxml schema with existing database contents..."
psql -U awips -d metadata -f $DUMP_FILE > /dev/null
if [ $? -ne 0 ]; then
    echo "FATAL: Populating ebxml schema with existing data has failed!"
    echo "FATAL: the update has failed!"
    exit 1
fi
echo "Done!"

echo -n "Removing dump file: $DUMP_FILE..."
rm -f $DUMP_FILE
if [ $? -ne 0 ]; then
    echo "WARNING: File $DUMP_FILE has not been removed. Clean up manually. Update still successful."
else
    echo "Done!"
fi

echo "Ebxml database schema update successful!"
exit 0
File diff suppressed because it is too large
@ -1,23 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

select distinct regexp_matches(stringvalue, 'officeID="(.*?)"') from ebxml.value where stringvalue like '%officeID%';
@ -1,36 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- Update SiteSubscription value entries to not have the notify attribute
update ebxml.value set stringvalue = regexp_replace(stringvalue, 'notify=".*?" ', '', 'g');

-- Update GroupDefinition value entries to not have the option attribute
-- Explanation of how this works: \\1 is the first set of parentheses, \\2 is the second set,
-- so we are removing the option="<anything>" section out of any rows that have it and stitching the
-- data back together.
update ebxml.value set stringvalue = regexp_replace(stringvalue, '(<groupDefinition.*?)option=".*?"(.*)', E'\\1\\2', 'g');

-- Commit the transaction
END;
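To illustrate the stitching on a hypothetical stringvalue (attribute values invented), the groupDefinition replace behaves like this:

# before: <groupDefinition option="LATEST" name="g1">
# after:  <groupDefinition  name="g1">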
@ -1,22 +0,0 @@
#!/bin/bash

SQL_SCRIPT="removeDeliveryOptions.sql"
# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - removing delivery options from Subscription Manager"
# Update the database
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}

if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0
@ -1,26 +0,0 @@
#!/bin/bash

echo "INFO: update started - removing delivery options from Subscription Manager localization files"

# Update subscription manager configuration files
for DIRECTORY in `find /awips2/edex/data/utility/cave_static/ -type d -name subscriptionManagerConfig`
do
    for FILE in `find $DIRECTORY -type f -name "*.xml"`
    do
        # Comment out the Delivery/Notify column entry, in case it needs to be restored (rollback scenario)
        sed -i 's/\(.*\)<column\(.*Delivery.*\)\/>/\1<!-- column\2\/-->/' $FILE

        # Make sure each sed command succeeds
        if [ $? -ne 0 ]; then
            echo "FATAL: the update has failed!"
            exit 1
        fi

        # Delete the md5 file
        rm $FILE.md5
    done
done

echo "INFO: the update has completed successfully!"

exit 0
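The sed above rewrites a matching column element into an XML comment so a rollback can restore it; a minimal sketch with hypothetical attributes:

echo '<column name="Delivery/Notify" width="100" />' | sed 's/\(.*\)<column\(.*Delivery.*\)\/>/\1<!-- column\2\/-->/'
# <!-- column name="Delivery/Notify" width="100" /-->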
@ -1,34 +0,0 @@
#!/bin/bash
# This script will update any *-harvester.xml files
# to remove the registry backup host tags.
#
# This update needs to be performed with build 13.5.1.
# This update is only for edex servers which host *-harvester.xml files

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

files=`find /awips2/edex/data/utility/common_static -iname \*-harvester.xml`

if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

for f in $files; do
    echo Updating $f
    bf=$f.bak.`date +%m%d%y`
    cp $f $bf
    # remove the registry backup tags
    awk -F '/' ' { gsub(/<primaryRegistryHost>127.0.0.1<\/primaryRegistryHost>/,"");
                   gsub(/<secondaryRegistryHost>127.0.0.1<\/secondaryRegistryHost>/,"");
                   gsub(/<tertiaryRegistryHost>127.0.0.1<\/tertiaryRegistryHost>/,"");
                   print; } ' $bf > $f
done

echo "INFO: The update finished successfully."
exit 0
@ -1,23 +0,0 @@
#!/bin/bash

SQL_SCRIPT="renameSubscriptionToSiteSubscription.sql"

# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - renaming Subscription to SiteSubscription"

# run the update
/awips2/psql/bin/psql -U awips -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: the update has completed successfully!"

exit 0
@ -1,71 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
\set ON_ERROR_STOP 1
\connect metadata;

-- Start a transaction
BEGIN;

-- New classification nodes for SiteSubscription
insert into ebxml.classificationnode(id, lid, objecttype, owner, status, userversionname, versionname, description_key, name_key, code, parent, path) SELECT 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.SiteSubscription', 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.SiteSubscription', objecttype, owner, status, userversionname, versionname, description_key, name_key, 'com.raytheon.uf.common.datadelivery.registry.SiteSubscription', parent, '/com.raytheon.uf.common.datadelivery.registry.SiteSubscription' FROM ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.Subscription';
insert into ebxml.classificationnode(id, lid, objecttype, owner, status, userversionname, versionname, description_key, name_key, code, parent, path) SELECT 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:SiteSubscription', 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:SiteSubscription', objecttype, owner, status, userversionname, versionname, description_key, name_key, 'SiteSubscription', parent, '/urn:oasis:names:tc:ebxml-regrep:classificationScheme:ObjectType/RegistryObject/SiteSubscription' FROM ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:Subscription';

-- Update foreign keys for the taxonomy to SiteSubscription
update ebxml.taxonomyelementtype_classificationnode set classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.SiteSubscription' where classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.Subscription';
update ebxml.taxonomyelementtype_classificationnode set classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:SiteSubscription' where classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:Subscription';

-- Delete Subscription classification nodes
delete from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:Subscription';
delete from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.Subscription';

-- Update registry object references from Subscription to SiteSubscription
update ebxml.registryobjectlist_registryobject set registryobject_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.SiteSubscription' where registryobject_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.Subscription';
update ebxml.registryobject set objecttype = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.SiteSubscription' where objecttype ='urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.Subscription';
update ebxml.value set stringvalue = regexp_replace(stringvalue, '<subscription ', '<siteSubscription ', 'g');
update ebxml.value set stringvalue = regexp_replace(stringvalue, '</subscription>', '</siteSubscription>', 'g');

-- Update the aggregated events
update events.aggregate set grouping = regexp_replace(grouping, 'com\.raytheon\.uf\.common\.datadelivery\.registry\.Subscription', 'com.raytheon.uf.common.datadelivery.registry.SiteSubscription', 'g');

-- New classification nodes for InitialPendingSiteSubscription
insert into ebxml.classificationnode(id, lid, objecttype, owner, status, userversionname, versionname, description_key, name_key, code, parent, path) SELECT 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', objecttype, owner, status, userversionname, versionname, description_key, name_key, 'com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', parent, '/com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' FROM ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
insert into ebxml.classificationnode(id, lid, objecttype, owner, status, userversionname, versionname, description_key, name_key, code, parent, path) SELECT 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSiteSubscription', 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSiteSubscription', objecttype, owner, status, userversionname, versionname, description_key, name_key, 'SiteSubscription', parent, '/urn:oasis:names:tc:ebxml-regrep:classificationScheme:ObjectType/RegistryObject/SiteSubscription' FROM ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSubscription';

-- Update foreign keys for the taxonomy to InitialPendingSiteSubscription
update ebxml.taxonomyelementtype_classificationnode set classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' where classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
update ebxml.taxonomyelementtype_classificationnode set classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSiteSubscription' where classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSubscription';

-- Delete InitialPendingSubscription classification nodes
delete from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSubscription';
delete from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';

-- Update registry object references from InitialPendingSubscription to InitialPendingSiteSubscription
update ebxml.registryobjectlist_registryobject set registryobject_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' where registryobject_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
update ebxml.registryobject set objecttype = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' where objecttype ='urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
|
||||
update ebxml.value set stringvalue = regexp_replace(stringvalue, '<subscription ', '<siteSubscription ', 'g');
|
||||
update ebxml.value set stringvalue = regexp_replace(stringvalue, '</subscription>', '</siteSubscription>', 'g');
|
||||
|
||||
-- Update the aggregated events
|
||||
update events.aggregate set grouping = regexp_replace(grouping, 'com\.raytheon\.uf\.common\.datadelivery\.registry\.Subscription', 'com.raytheon.uf.common.datadelivery.registry.SiteSubscription', 'g');
|
||||
|
||||
-- New classification nodes for InitialPendingSiteSubscription
|
||||
insert into ebxml.classificationnode(id, lid, objecttype, owner, status, userversionname, versionname, description_key, name_key, code, parent, path) SELECT 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', objecttype, owner, status, userversionname, versionname, description_key, name_key, 'com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', parent, '/com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' FROM ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
|
||||
insert into ebxml.classificationnode(id, lid, objecttype, owner, status, userversionname, versionname, description_key, name_key, code, parent, path) SELECT 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSiteSubscription', 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSiteSubscription', objecttype, owner, status, userversionname, versionname, description_key, name_key, 'SiteSubscription', parent, '/urn:oasis:names:tc:ebxml-regrep:classificationScheme:ObjectType/RegistryObject/SiteSubscription' FROM ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSubscription';
|
||||
|
||||
-- Update foreign keys for the taxonomy to InitialPendingSiteSubscription
|
||||
update ebxml.taxonomyelementtype_classificationnode set classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' where classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
|
||||
update ebxml.taxonomyelementtype_classificationnode set classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSiteSubscription' where classificationnode_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSubscription';
|
||||
|
||||
-- Delete InitialPendingSubscription classification nodes
|
||||
delete from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:InitialPendingSubscription';
|
||||
delete from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
|
||||
|
||||
-- Update registry object references from InitialPendingSubscription to InitialPendingSiteSubscription
|
||||
update ebxml.registryobjectlist_registryobject set registryobject_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' where registryobject_id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
|
||||
update ebxml.registryobject set objecttype = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription' where objecttype ='urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.InitialPendingSubscription';
|
||||
update ebxml.value set stringvalue = regexp_replace(stringvalue, '<pendingSubscription ', '<pendingSiteSubscription ', 'g');
|
||||
update ebxml.value set stringvalue = regexp_replace(stringvalue, '</pendingSubscription>', '</pendingSiteSubscription>', 'g');
|
||||
update ebxml.value set stringvalue = regexp_replace(stringvalue, '<initialPendingSubscription ', '<initialPendingSiteSubscription ', 'g');
|
||||
update ebxml.value set stringvalue = regexp_replace(stringvalue, '</initialPendingSubscription>', '</initialPendingSiteSubscription>', 'g');
|
||||
|
||||
-- Update the aggregated events
|
||||
update events.aggregate set grouping = regexp_replace(grouping, 'com\.raytheon\.uf\.common\.datadelivery\.registry\.InitialPendingSubscription', 'com.raytheon.uf.common.datadelivery.registry.InitialPendingSiteSubscription', 'g');
|
||||
|
||||
-- Commit the transaction
|
||||
END;
|
|
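Before moving on, the migration can be spot-checked. A minimal verification sketch, not part of the original delta script, reusing the same psql conventions as the other scripts in this directory; a count of 0 means no classification nodes still carry the old Subscription ids:

    /awips2/psql/bin/psql -U awips -d metadata -t -c "select count(*) from ebxml.classificationnode where id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:Subscription' or id = 'urn:oasis:names:tc:ebxml-regrep:ObjectType:RegistryObject:com.raytheon.uf.common.datadelivery.registry.Subscription';"
    # Expected output once the updates above have been applied: 0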
@ -1,34 +0,0 @@
#!/bin/bash
# This script will drop the ntrans table and remove the ntrans hdf5 files.
#
# This update needs to be performed when going from build 13.3.1 to build 13.4.1 (or 13.5.1).
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND="DROP TABLE IF EXISTS ntrans; UPDATE plugin_info SET initialized='false' WHERE name='ntrans';"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

echo "INFO: Dropping the metadata ntrans table."
${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: Purging ntrans hdf5 files."
if [ -d /awips2/edex/data/hdf5/ntrans ]; then
    rm -rfv /awips2/edex/data/hdf5/ntrans
fi

echo "INFO: The update was successfully applied."

exit 0
@ -1,13 +0,0 @@
ALTER TABLE localizedstrings RENAME TO localizedstring;
ALTER TABLE intlstring RENAME TO internationalstring;
ALTER TABLE intlstring_localizedstrings RENAME TO internationalstring_localizedstring;
ALTER TABLE internationalstring_localizedstring RENAME COLUMN intlstring_key TO internationalstring_key;
ALTER TABLE serviceinterfacetype RENAME TO serviceinterface;
ALTER TABLE stringqueryexpressiontype RENAME TO stringqueryexpression;
ALTER TABLE vocabularytermtype RENAME TO vocabularyterm;
ALTER TABLE xmlqueryexpressiontype RENAME TO xmlqueryexpression;
ALTER TABLE queryexpressiontype RENAME TO queryexpression;
ALTER TABLE queryexpressiontype_slot RENAME TO queryexpression_slot;
ALTER TABLE maptype RENAME TO map;
ALTER TABLE entrytype RENAME TO entry;
ALTER TABLE maptype_entrytype RENAME TO map_entry;
@ -1,15 +0,0 @@
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
    <xsl:output indent="yes"/>
    <xsl:strip-space elements="*"/>

    <xsl:template match="node()|@*">
        <xsl:copy>
            <xsl:apply-templates select="node()|@*"/>
        </xsl:copy>
    </xsl:template>

    <xsl:template match="providerType">
        <providerType dataType="GRID" plugin="grid" availabilityDelay="100" />
    </xsl:template>

</xsl:stylesheet>
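The stylesheet above is an identity transform plus one override: every node and attribute is copied through unchanged, and any providerType element is replaced wholesale with the new class-style element. A hedged usage sketch; input.xml is a placeholder file name, not a file from this repository:

    xsltproc updateProviderType.xsl input.xml > input.xml.new
    # input.xml.new is input.xml with each <providerType ...> element rewritten
    # as <providerType dataType="GRID" plugin="grid" availabilityDelay="100" />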
@ -1,47 +0,0 @@
#!/bin/bash

XSLT_SCRIPT="updateProviderType.xsl"
# ensure that the xslt script is present
if [ ! -f ${XSLT_SCRIPT} ]; then
    echo "ERROR: the required xslt script - ${XSLT_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - updating ProviderType to be a proper class in the database"

# Dump the provider rows from the database for modification
PROVIDER_ROWS=/tmp/provider_rows.tmp
psql -U awips -d metadata -c "\copy (select key, stringvalue from ebxml.value where stringvalue like '%<provider %') To '${PROVIDER_ROWS}'";

# Get old separator
OIFS=$IFS

IFS=$'\n'
for f in `cat ${PROVIDER_ROWS}`
do
    IFS=$'\t'
    arr2=( $f )

    KEY=${arr2[0]}
    XML_FILE=/tmp/${KEY}.xml

    # Write out database contents
    echo "${arr2[1]}" > ${XML_FILE}
    # Remove carriage returns
    sed -i 's/\\n//g' ${XML_FILE}

    # Run the xslt transform on the tmp file
    xsltproc ${XSLT_SCRIPT} ${XML_FILE} > ${XML_FILE}.new

    # Insert the new xml into the database
    NEW_XML=`cat ${XML_FILE}.new`
    psql -U awips -d metadata -c "UPDATE ebxml.value SET stringvalue = '${NEW_XML}' WHERE key = '${KEY}'"
done

# Restore old separator
IFS=$OIFS

echo "INFO: the update has completed successfully!"

exit 0
@ -1,31 +0,0 @@
#!/bin/bash

XSLT_SCRIPT="updateProviderType.xsl"
# ensure that the xslt script is present
if [ ! -f ${XSLT_SCRIPT} ]; then
    echo "ERROR: the required xslt script - ${XSLT_SCRIPT} was not found."
    echo "FATAL: the update has failed!"
    exit 1
fi

echo "INFO: update started - updating ProviderType to be a proper class in localization files"

# Update subscription manager configuration files
for FILE in `find /awips2/edex/data/utility/common_static -iname \*-harvester.xml`
do
    cp $FILE $FILE.bak
    xsltproc ${XSLT_SCRIPT} ${FILE}.bak > ${FILE}

    # Make sure each command succeeds
    if [ $? -ne 0 ]; then
        echo "FATAL: the update has failed!"
        exit 1
    fi

    # Delete the md5 file
    rm $FILE.md5
done

echo "INFO: the update has completed successfully!"

exit 0
@ -1,203 +0,0 @@
#!/usr/bin/env python

##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

import logging
import shutil
import sys
from ufpy import ConfigFileUtil


logging.basicConfig(format="%(asctime)s %(levelname)s: %(message)s",
                    datefmt="%H:%M:%S",
                    level=logging.INFO)


SVCBU_CONFIG_FILENAME = "/awips2/GFESuite/ServiceBackup/configuration/svcbu.properties"
SVCBU_CONFIG_BACKUP_FILENAME = SVCBU_CONFIG_FILENAME + ".bak"
SVCBU_CONFIG_MIGRATE = ['SVCBU_HOST', 'MSG_SEND_COMMAND', 'CDSPORT', 'SVCBU_DB',
                        'SVCBU_TRIM_ELEMS', 'SVCBU_FAILED_SITE_PORT',
                        'SVCBU_GRIDAREA', 'SVCBU_ADDRESSEE', 'SVCBU_WMO_HEADER',
                        'SVCBU_USER', 'SVCBU_USER_ID', 'EXPORT_GRID']
STATIC_CONFIG_DATA = """
#Variables used by service backup:
#
# AWIPS_HOME:             The AWIPS II installation directory.
#
# GFESUITE_HOME:          The server directory containing files and programs
#                         used by GFE during Service Backup
#
# GFESUITE_BIN:           Directory containing GFE server side utility
#                         programs including ifpnetCDF and iscMosaic
#
# SVCBU_HOME:             Directory used by service backup as a sandbox for
#                         constructing files to be sent and for processing
#                         received files.
#
# LOCALIZATION_PATH:      This is the path to the root of the localization
#                         directory. This path is used for properly importing
#                         and exporting configuration data
#
# IFPS_LOG:               Directory containing logs for the service backup
#                         operations.
#
# IFPS_DATA:              Directory containing the svcbu_export_elements file.
#                         This file is used to specify which weather elements
#                         are packaged and sent when exporting digital data for
#                         a site.
#
# LOCK_DIR:               Directory used for lock files. Each Service Backup
#                         operation maintains a lock file during its execution.
#                         The lock remains for the duration of the operation and
#                         is erased upon completion to prevent simultaneous
#                         operations from occurring.
#
# SCRIPTS_DIR:            Directory containing the scripts used by service
#                         backup
#
# CAVE_LAUNCH_SCRIPT:     This path points to the script which starts GFE. This
#                         variable is read when the user hits the 'Enable'
#                         button on the service backup GUI.
#
# SVCBU_HOST:             Server where the service backup scripts will be
#                         executed.
#
# MSG_SEND_COMMAND:       The command executed to send a message via the message
#                         handling system. This value will usually be msg_send.
#                         But, it can be changed to a different command in a
#                         test environment.
#
# CDSPORT:                This is the port on which the Thrift Client listens
#                         for script execution events.
#
# SVCBU_DB:               Defines which database to use for exporting
#                         grids to central server for service backup.
#                         VALID VALUES: Fcst
#                                       Official (default)
#
# SVCBU_TRIM_ELEMS:       Indication of whether ifpnetCDF needs to trim
#                         off elements while exporting grids to central
#                         server.
#                         VALID VALUES: 1 - To do element trimming
#                                       0 - To disable element trimming
#                         Note: ${IFPS_DATA}/svcbu_export_elements.ccc
#                         file has to be present for this to work. This file
#                         will contain the list of elements to include in the
#                         netcdf file that's being sent over to the central
#                         server.
#
# SVCBU_FAILED_SITE_PORT: Unused
#
# SVCBU_GRIDAREA:         The name of the edit area used when exporting grids
#                         to the central server for service backup and
#                         imported to the Restore database after service backup.
#                         DEFAULT VALUE: ISC_Send_Area
#
# SVCBU_ADDRESSEE:        The name of the msg_send addressee. Will be used to
#                         pass with -a flag of msg_send. (NCF use only).
#
# SVCBU_WMO_HEADER:       The WMO header that will be used to pass in calls to
#                         msg_send with -i argument. This will be empty to
#                         begin with. Should not be changed. (NCF use only)
#
# EXPORT_GRID             Indicates the ways grids are exported
#                         VALID VALUES: 0 = do not export grids
#                                       1 = grids are exported by quartz timer
#                                           at 15 after each hour, the service
#                                           backup GUI, and from GFE via the
#                                           'Send Grids to NDFD...' script
#                                       2 = grids are exported only by the
#                                           service backup GUI and from GFE via
#                                           the 'Send Grids to NDFD...' script
#
# SVCBU_USER              Indicates that the site can configure a special user
#                         to run GFE when in service backup
#                         VALID VALUES: 0 = do not use a designated user to run
#                                           GFE when in service backup
#                                       1 = use a designated user to run GFE
#                                           when in service backup
#
# SVCBU_USER_ID           The user id of the designated user to run GFE when
#                         in service backup
#
# PRIMARY_SITES           (Optional) For dual-domain sites, a comma-separated
#                         list of sites for the export grids cron to run for
#                         instead of the site defined as AW_SITE_IDENTIFIER. If
#                         this setting is empty or not defined, cron will only
#                         export grids for the site set as AW_SITE_IDENTIFIER.
#
#
# Directories used by Service Backup
GFESUITE_HOME=/awips2/GFESuite
GFESUITE_BIN=/awips2/GFESuite/bin
SVCBU_HOME=/awips2/GFESuite/ServiceBackup/svcbu
LOCALIZATION_PATH=/awips2/edex/data/utility
IFPS_LOG=/awips2/GFESuite/ServiceBackup/logs
IFPS_DATA=/awips2/GFESuite/ServiceBackup/data
LOCK_DIR=/awips2/GFESuite/ServiceBackup/locks
SCRIPTS_DIR=/awips2/GFESuite/ServiceBackup/scripts
CAVE_LAUNCH_SCRIPT=/awips2/cave/cave.sh

"""


def get_old_config():
    return ConfigFileUtil.parseKeyValueFile(SVCBU_CONFIG_FILENAME)

def backup_old_config():
    shutil.move(SVCBU_CONFIG_FILENAME, SVCBU_CONFIG_BACKUP_FILENAME)

def write_new_config(old_vals):
    with open(SVCBU_CONFIG_FILENAME, 'w') as configOut:
        configOut.write(STATIC_CONFIG_DATA)
        for entry in SVCBU_CONFIG_MIGRATE:
            oldValue = old_vals[entry]
            configOut.write(entry + "=" + oldValue + "\n")
        configOut.write("PRIMARY_SITES=")
    if "NATIONAL_CENTER" in old_vals and old_vals["NATIONAL_CENTER"] == '1':
        logging.warning("Since this system was previously configured as a " +
                        "national center, please configure the " +
                        "PRIMARY_SITES setting with your GFE sites after " +
                        "this script is complete.")

def main():
    logging.info("Migrating svcbu.properties for 13.5.1.")

    try:
        oldConfig = get_old_config()
    except:
        logging.exception("Could not read old configuration from " + SVCBU_CONFIG_FILENAME)
        # abort: the remaining steps depend on the old configuration
        sys.exit(1)

    try:
        backup_old_config()
    except:
        logging.exception("Could not backup previous svcbu.properties.")
        sys.exit(1)

    try:
        write_new_config(oldConfig)
    except:
        logging.exception("Could not write new svcbu.properties.")
        sys.exit(1)

    logging.info("Migration complete.")
    logging.info("After you have verified that svcbu.properties was properly migrated, " +
                 "please delete the file " + SVCBU_CONFIG_BACKUP_FILENAME + ".")


if __name__ == '__main__':
    main()
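A usage sketch under stated assumptions: the file name migrate_svcbu.py is hypothetical (the archive does not record it), and the script must run on the host that owns /awips2/GFESuite:

    python migrate_svcbu.py
    # Hypothetical file name. On success, svcbu.properties is rewritten in place
    # and the previous version is kept at svcbu.properties.bak for comparison.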
@ -1,22 +0,0 @@
#!/bin/bash
# DR #2275 - this script is needed to add the dataURI column back into the
# bufrmosavn and bufrmoshpc tables.

PSQL="/awips2/psql/bin/psql"

${PSQL} -U awips -d metadata -c "ALTER TABLE bufrmosavn ADD COLUMN datauri character varying(255);"
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to add dataURI column for bufrmosavn"
    echo "FATAL: The update has failed."
    exit 1
fi
${PSQL} -U awips -d metadata -c "ALTER TABLE bufrmoshpc ADD COLUMN datauri character varying(255);"
if [ $? -ne 0 ]; then
    echo "ERROR: Failed to add dataURI column for bufrmoshpc"
    echo "FATAL: The update has failed."
    exit 1
fi
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE bufrmosavn"
${PSQL} -U awips -d metadata -c "VACUUM FULL ANALYZE bufrmoshpc"

echo "INFO: dataURI columns added successfully"
@ -1,18 +0,0 @@
#!/bin/sh
# This script should be run on dx1 as part of next delivery to update the definitions for
# the stdtextproducts and practicestdtextproducts tables.

psql -U awips -d fxa -c "ALTER TABLE stdtextproducts RENAME COLUMN createtime to reftime;"
psql -U awips -d fxa -c "ALTER TABLE practicestdtextproducts RENAME COLUMN createtime to reftime;"

psql -U awips -d fxa -c "ALTER TABLE stdtextproducts ADD COLUMN inserttime timestamp without time zone;"
psql -U awips -d fxa -c "ALTER TABLE practicestdtextproducts ADD COLUMN inserttime timestamp without time zone;"

psql -U awips -d fxa -c "CREATE INDEX stdtextproductsinserttimeindex ON stdtextproducts USING btree (inserttime);"
psql -U awips -d fxa -c "CREATE INDEX practicestdtextproductsinserttimeindex ON practicestdtextproducts USING btree (inserttime);"

psql -U awips -d fxa -c "UPDATE stdtextproducts SET inserttime = CURRENT_TIMESTAMP AT TIME ZONE 'GMT' WHERE inserttime IS NULL;"
psql -U awips -d fxa -c "UPDATE practicestdtextproducts SET inserttime = CURRENT_TIMESTAMP AT TIME ZONE 'GMT' WHERE inserttime IS NULL;"

psql -U awips -d fxa -c "ALTER TABLE stdtextproducts ALTER COLUMN inserttime SET NOT NULL;"
psql -U awips -d fxa -c "ALTER TABLE practicestdtextproducts ALTER COLUMN inserttime SET NOT NULL;"
@ -1,82 +0,0 @@
import os, subprocess, re, stat, sys, time
import h5py

matchRe = re.compile('.*?(-{1,2}\\d{6,}).h5')

def findEachGroup(group, datasetList):
    if type(group) is h5py.highlevel.Group:
        for g in group.keys():
            findEachGroup(group[g], datasetList)
    elif type(group) is h5py.highlevel.Dataset:
        datasetList.append(group.name)


def processFile(filename, match):
    startIndex = filename.find(match.groups()[0])
    endIndex = filename.find('.h5')
    reducedFilename = filename[0:startIndex] + filename[endIndex:]
    if not os.path.exists(reducedFilename):
        # this is the first one, just rename it
        try:
            os.rename(filename, reducedFilename)
        except OSError, e:
            print e
    else:
        # open the file, find the datasets
        datasetList = []
        hfile = None
        try:
            hfile = h5py.File(filename, 'r')
            findEachGroup(hfile['/'], datasetList)
        finally:
            if hfile:
                hfile.close()

        fileSuccess = True
        # for each dataset in the file, run h5copy to copy it into the output file
        for dataset in datasetList:
            if not copy(filename, dataset, reducedFilename):
                fileSuccess = False

        # remove original file
        if True: #if fileSuccess:
            os.remove(filename)

def fileWalk(pth):
    if os.path.isdir(pth):
        innerFiles = os.listdir(pth)
        for f in innerFiles:
            fileWalk(pth + '/' + f)
    else:
        match = matchRe.match(pth)
        if match:
            processFile(pth, match)

def copy(filename, dataset, reducedFilename):
    # note that this copies links as if they were real datasets, increasing the size of the output file
    cmd = ['h5copy', '-p', '-i', filename, '-o', reducedFilename, '-s', dataset, '-d', dataset]
    ret = subprocess.call(cmd)
    success = (ret == 0)

    if success:
        os.chmod(reducedFilename, stat.S_IWUSR | stat.S_IWGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        #print "Successfully copied filename:", filename, "dataset:", dataset
        return True
    else:
        print "Failed to copy filename:", filename, "dataset:", dataset
        return False

def main():
    if len(sys.argv) < 2:
        print "Please provide full path to input directory"
    else:
        inputDir = sys.argv[1]
        t0 = time.time()
        fileWalk(inputDir)
        t1 = time.time()
        print "Total copy time for directory", inputDir, (t1-t0), "seconds"


if __name__ == '__main__':
    main()
@ -1,95 +0,0 @@
#!/usr/bin/env python

##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

import glob
import os
import shutil


def forceTextProdRegen():
    oldTextProds = glob.glob('/awips2/edex/data/utility/cave_static/configured/*/gfe/userPython/textProducts/*.py*')
    for script in oldTextProds:
        try:
            os.remove(script)
        except:
            pass
    oldTextUtils = glob.glob('/awips2/edex/data/utility/cave_static/configured/*/gfe/userPython/textUtilities/regular/*.py*')
    for script in oldTextUtils:
        try:
            os.remove(script)
        except:
            pass
    # touch shapefile and template file to force regen of textProducts and all textUtilities
    shapeFile = glob.glob('/awips2/edex/data/utility/edex_static/base/shapefiles/*/*.shp')[0]
    prodTemplate = glob.glob('/awips2/edex/data/utility/edex_static/base/textproducts/templates/product/*.py')[0]
    # passing None as the second arg is equivalent to running touch
    os.utime(shapeFile, None)
    os.utime(prodTemplate, None)

def relocateSiteLevelUtils():
    sitePaths = getSubDirs('/awips2/edex/data/utility/cave_static/site')
    for site in sitePaths:
        scripts = glob.glob(os.path.join(site, 'gfe/userPython/textProducts/*.py'))
        for script in scripts:
            if not isTextProduct(script):
                moveToUtilities(script)

def relocateUserLevelUtils():
    userPaths = getSubDirs('/awips2/edex/data/utility/cave_static/user')
    for user in userPaths:
        scripts = glob.glob(os.path.join(user, 'gfe/userPython/textProducts/*.py'))
        for script in scripts:
            if not isTextProduct(script):
                moveToUtilities(script)

def getSubDirs(path):
    return [os.path.join(path, name) for name in os.listdir(path)
            if os.path.isdir(os.path.join(path, name))]

def isTextProduct(path):
    retVal = False
    with open(path, 'r') as f:
        txt = f.read()
        if "class TextProduct" in txt:
            retVal = True
    return retVal

def moveToUtilities(srcPath):
    destPath = srcPath.replace('textProducts', 'textUtilities/regular', 1)
    if not os.path.isdir(os.path.dirname(destPath)):
        os.makedirs(os.path.dirname(destPath))
    shutil.move(srcPath, destPath)
    # make sure any .pyo, .pyc, and .md5 files are not left behind
    garbageFiles = glob.glob(srcPath + "*")
    for file in garbageFiles:
        try:
            os.remove(file)
        except:
            pass

def main():
    forceTextProdRegen()
    relocateSiteLevelUtils()
    relocateUserLevelUtils()

if __name__ == '__main__':
    main()
@ -1,10 +0,0 @@
#!/bin/sh
# This script should be run on dx1 as part of next delivery to remove the hdfFileId columns

tablesToUpdate=`psql -U awips -d metadata -t -c "select table_name from information_schema.columns where table_catalog='metadata' and table_schema='awips' and column_name = 'hdffileid';"`

for table in $tablesToUpdate
do
    psql -U awips -d metadata -t -c "ALTER TABLE $table DROP COLUMN hdffileid"
done
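The same information_schema query that feeds the loop doubles as a check afterwards - a sketch, not part of the original script; no rows returned means every hdffileid column is gone:

    psql -U awips -d metadata -t -c "select table_name from information_schema.columns where table_catalog='metadata' and table_schema='awips' and column_name = 'hdffileid';"
    # No output is expected once the ALTER TABLE loop has run.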
@ -1,38 +0,0 @@
This directory contains all the scripts needed to upgrade from grib to the new
"Unified" grid storage format.

The following commands will need to be run to upgrade postgres and hdf5 files:
bash register_grid_coverage.sh
bash create_grid_tables.sh
python convert_grib_data.py

The following commands will need to be run to upgrade localization files:
bash copy_grib_purge_rules.sh
bash update_D2D_bundles.sh
bash update_D2D_procedures.sh
bash update_FFMP_Source.sh

convert_grib_data.py can take a significant amount of time, as much as 2 hours.
If you don't need your data and you want to upgrade faster you can purge all grib data.
If there is no grib data to convert there is no need to run convert_grib_data.py.

After convert_grib_data.py has run, it may not convert all models (perhaps skipping
Alaskan, Hawaiian, or other models). If this is the case there will be data left over
in /awips2/edex/data/hdf5/grib. This data can be used if you need to convert additional
models or if a rollback is necessary. The system is not set up to purge this data, so
after a successful upgrade, once new data is arriving, this directory will need to be
deleted (a cleanup sketch follows this file).

The format of the data in /awips2/edex/data/hdf5/topo/modelStaticTopo.h5 has changed. When the
ingestGrib edex is started it will attempt to regenerate this file. This is a very time- and
memory-intensive process. To save time when upgrading an operational system it is recommended
that you copy a modelStaticTopo.h5 file from a testbed or other system that has already generated it.

The update_saved_display.sh script can be used if there are any saved displays that are
saved outside of localization.

If for some reason the upgrade fails or you want to roll back, install an old
version of edex and run the scripts in the unified_grid_rollback directory.
The postgres and hdf5 scripts in that directory will need to be run in the opposite order:
python convert_grib_data.py
bash create_grid_tables.sh
bash register_grid_coverage.sh
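A cleanup sketch for the leftover grib data described above - an assumption-laden example, not an official upgrade step; run it only after the upgrade has been verified and new grid data is flowing:

    # Assumes the upgrade succeeded and nothing still reads the old grib hdf5 files.
    if [ -d /awips2/edex/data/hdf5/grib ]; then
        rm -rf /awips2/edex/data/hdf5/grib
    fi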
@ -1,351 +0,0 @@
#!/usr/bin/python
# This script will convert existing grib data to the new unified grid storage format.
#
# This needs to be performed with build ????
# create_grid_tables.sh must be run before running this script.

from shutil import copytree, move, copy
from subprocess import Popen, PIPE
from thread import start_new_thread, allocate_lock
import sys
from os.path import exists, isdir
from os import mkdir
from time import sleep, time
import h5py
import struct

# this is generally a disk bound process so more threads won't help unless the disk is fast
numThreads = 1
# setting too many records at once causes the exec to psql to fail because of the long arg list
maxRecords = 200

postgresCmd = "psql -U awips -d metadata -t -q -A -c "
hdf5loc = "/awips2/edex/data/hdf5/"

quadrantGrids = ["ENSEMBLE37", "ENSEMBLE38", "ENSEMBLE39", "ENSEMBLE40",
                 "ECMF1", "ECMF2", "ECMF3", "ECMF4", "ECMF5", "ECMF6", "ECMF7", "ECMF8",
                 "UKMET40", "UKMET39", "UKMET38", "UKMET37"]

akGrids = ["MOSGuide-AK", "AK-NamDNG5", "AK-RTMA", "AKWAVE239", "AKwave10", "AKwave4", "HiResW-NMM-AK", "HiResW-ARW-AK",
           "ETA242", "mesoEta217", "mesoEta216", "ETA207", "AVN203", "MRF203", "GFS160"]

prGrids = ["HiResW-NMM-SJU", "HiResW-ARW-SJU", "PR-NamDNG5", "PR-RTMA", "MRF205", "GFS161", "mesoEta237"]

hiGrids = ["HI-NamDNG5", "HI-RTMA", "HiResW-NMM-HI", "HiResW-ARW-HI", "MRF204", "AVN225", "GFS254", "SREF243"]

guGrids = ["HiResW-NMM-GU", "HiResW-ARW-GU"]

blacklistGrids = {"quadrant grids which have already been converted in an assembled format":quadrantGrids,
                  "grids over Alaska":akGrids, "grids over Puerto Rico":prGrids,
                  "grids over Hawaii and the Pacific Region":hiGrids, "grids over Guam":guGrids}


parameters = {}
models = []
gridinfo_seq = []
models_lock = allocate_lock()


def queryPostgres(sql):
    result = Popen(postgresCmd + "\"" + sql + "\"", stdout=PIPE, shell=True)
    retVal = []
    for line in result.stdout:
        retVal.append(line.strip().split("|"))
    return retVal

def convertModel(modelName):
    sqlTime = 0
    hdfTime = 0
    totTime = 0
    totTime -= time()
    print modelName, "Loading existing grid_info"
    infoMap = loadGridInfo(modelName)
    infoSql = None
    print modelName, "Querying grib database"
    rows = queryPostgres("select grib.forecasttime, grib.reftime, grib.utilityflags, grib.rangeend, grib.rangestart, grib.inserttime, grib.datauri, gridcoverage.id, grib_models.level_id, grib_models.location_id from grib, grib_models, gridcoverage, level where grib.modelinfo_id = grib_models.id and grib_models.location_id = gridcoverage.id and grib_models.level_id = level.id and grib_models.modelName = '%s' order by grib.forecasttime, grib.reftime, level.masterlevel_name" % modelName)
    print modelName, "Converting %d records" % len(rows)
    gridSql = None
    lastFile = None
    gribFiles = hdf5loc + "grib/" + modelName + "/"
    gridFiles = hdf5loc + "grid/" + modelName + "/"
    if not(isdir(hdf5loc + "grid/")):
        mkdir(hdf5loc + "grid/")
    if not(isdir(gridFiles)):
        mkdir(gridFiles)
    count = 0
    for row in rows:
        gribforecasttime = row[0]
        gribreftime = row[1]
        gributilityflags = row[2]
        gribrangeend = row[3]
        gribrangestart = row[4]
        gribinserttime = row[5]
        gribdatauri = row[6]
        gridcoverageid = row[7]
        gribmodelslevelid = row[8]
        gribmodelslocationid = row[9]
        datauriparts = gribdatauri.split("/")
        datatime = datauriparts[2]
        paramabbrev = datauriparts[4]
        masterlevel = datauriparts[5]
        levelone = datauriparts[6]
        leveltwo = datauriparts[7]
        pert = datauriparts[9]
        version = datauriparts[10]
        secondaryId = "null"
        if version != "0":
            secondaryId = "Version" + version
        ensembleId = convertPert(pert)
        newdatauri = "/grid/" + datatime + "/" + modelName + "/" + secondaryId + "/" + ensembleId + "/" + gridcoverageid + "/" + paramabbrev + "/" + masterlevel + "/" + levelone + "/" + leveltwo
        hdfTime -= time()
        try:
            forecast = int(gribforecasttime)/3600
            prevgrp = gribdatauri
            newgrp = newdatauri
            dataset = "Data"
            if paramabbrev.startswith("static"):
                prevgrp = "/"
                newgrp = "/" + gridcoverageid
                dataset = paramabbrev
            filebase = "/%s-%s-FH-%.3d.h5" % (modelName, gribreftime.split(":")[0].replace(" ", "-"), forecast)
            hdf5file = gridFiles + masterlevel + filebase
            if not(paramabbrev.startswith("static")) or forecast == 0:
                if lastFile != None and lastFile.filename != hdf5file:
                    #print "Closing", lastFile.filename
                    lastFile.close()
                    lastFile = None
                if lastFile == None:
                    if not(exists(hdf5file)):
                        t0 = time()
                        if not(isdir(gridFiles + masterlevel)):
                            mkdir(gridFiles + masterlevel)
                        move(gribFiles + masterlevel + filebase, gridFiles + masterlevel)
                        hdfTime -= (time() - t0)
                    #print "Opening", hdf5file
                    lastFile = h5py.File(hdf5file)
                copyH5(lastFile, prevgrp, newgrp, dataset)
        except:
            print modelName, "Error", gribdatauri
            print sys.exc_info()[1]
            hdfTime += time()
            continue
        hdfTime += time()
        infokey = modelName + ":::" + secondaryId + ":::" + ensembleId + ":::" + gribmodelslevelid + ":::" + gribmodelslocationid + ":::" + paramabbrev
        infoid = infoMap.get(infokey)
        if infoid == None:
            infoid = nextGridInfoSeq()
            infoMap[infokey] = infoid
            if secondaryId == "null":
                secondaryId = "NULL"
            else:
                secondaryId = "\'" + secondaryId + "\'"
            if ensembleId == "null":
                ensembleId = "NULL"
            else:
                ensembleId = "\'" + ensembleId + "\'"
            if infoSql == None:
                infoSql = "insert into grid_info (id, datasetid, secondaryid, ensembleid, level_id, location_id, parameter_abbreviation) values "
            else:
                infoSql = infoSql + ", "
            infoSql = infoSql + ("(%d, '%s', %s, %s, %s, %s, '%s')" % (infoid, modelName, secondaryId, ensembleId, gribmodelslevelid, gribmodelslocationid, paramabbrev))
        if gridSql == None:
            gridSql = "insert into grid (id, forecasttime, reftime, utilityflags, rangeend, rangestart, datauri, inserttime, info_id) values "
        else:
            gridSql = gridSql + ", "
        gridSql = gridSql + ("(nextval(\'hibernate_sequence\'), %s, '%s', '%s', '%s', '%s', '%s', '%s', %d)" % (gribforecasttime, gribreftime, gributilityflags, gribrangeend, gribrangestart, newdatauri, gribinserttime, infoid))
        count += 1
        if count % maxRecords == 0:
            print modelName, "Committing %d grid records %d%%" % (maxRecords, 100*count/len(rows))
            sqlTime -= time()
            if infoSql != None:
                #print infoSql
                queryPostgres(infoSql)
                infoSql = None
            if gridSql != None:
                #print gridSql
                queryPostgres(gridSql)
                gridSql = None
            sqlTime += time()
    print modelName, "Committing remaining grid records"
    sqlTime -= time()
    if infoSql != None:
        #print infoSql
        queryPostgres(infoSql)
    if gridSql != None:
        #print gridSql
        queryPostgres(gridSql)
    if lastFile != None:
        lastFile.close()
    sqlTime += time()
    totTime += time()
    print modelName, "Time in sql commits = %ds" % (sqlTime)
    print modelName, "Time in hdf5 links = %ds" % (hdfTime)
    print modelName, "Total process Time = %ds" % (totTime)

def loadGridInfo(modelName):
    infoMap = {}
    for row in queryPostgres("select distinct id, datasetid, secondaryid, ensembleid, level_id, location_id, parameter_abbreviation, id from grid_info where datasetid = '%s'" % (modelName)):
        infokey = row[1] + ":::" + row[2] + ":::" + row[3] + ":::" + row[4] + ":::" + row[5] + ":::" + row[6]
        infoMap[infokey] = int(row[0])
    return infoMap

def nextGridInfoSeq():
    if len(gridinfo_seq) == 0:
        # The number of ids we need per model varies wildly from 1 to 263,
        # but on average 50 per model will grab enough ids that we don't
        # ever need to go back to the db, although if we do it's not really
        # a big deal, this is just trying to avoid excessive trips back
        n = max(len(models), 1)*50
        for row in queryPostgres("select nextval('gridinfo_seq') from generate_series(1,%d);" % (n)):
            gridinfo_seq.append(int(row[0]))
    return gridinfo_seq.pop()

def convertPert(pert):
    if pert == "1":
        return "ctl1"
    elif pert == "2":
        return "ctl2"
    elif pert == "3":
        return "n1"
    elif pert == "4":
        return "p1"
    elif pert == "5":
        return "n2"
    elif pert == "6":
        return "p2"
    elif pert == "7":
        return "n3"
    elif pert == "8":
        return "p3"
    elif pert == "9":
        return "n4"
    elif pert == "10":
        return "p4"
    elif pert == "11":
        return "n5"
    elif pert == "12":
        return "p5"
    return "null"

def copyH5(h5, gribdatauri, griddatauri, dataset="Data"):
    gribgrp = h5['/']
    gridgrp = gribgrp
    for part in gribdatauri.split('/'):
        if part:
            gribgrp = gribgrp[part]
    for part in griddatauri.split('/'):
        if part:
            gridgrp = gridgrp.require_group(part)
    if not(dataset in gridgrp.keys()):
        plists = {'lcpl': gribgrp[dataset]._lcpl, 'lapl': gribgrp[dataset]._lapl}
        plists['lcpl'].set_create_intermediate_group(False)
        h5py.h5o.link(gribgrp[dataset].id, gridgrp.id, dataset, **plists)


def processAllParameters():
    print "Populating parameter table from grib_models"
    sql = None
    c = 0
    for row in queryPostgres("select distinct abbreviation, name, unit from parameter"):
        p = {"abbreviation":row[0], "name":row[1], "unit":row[2]}
        parameters[row[0]] = p
    for row in queryPostgres("select distinct parameterabbreviation, parametername, parameterunit from grib_models"):
        if row[0] in parameters:
            continue
        p = {"abbreviation":row[0], "name":row[1], "unit":row[2]}
        parameters[row[0]] = p
        if sql == None:
            sql = "insert into parameter (abbreviation, name, unit) values "
        else:
            sql = sql + ", "
        c += 1
        sql = sql + ("('%s', '%s', '%s')" % (row[0], row[1], row[2]))
    if sql != None:
        queryPostgres(sql)
    print "Done populating parameter table, %d new rows added" % (c)

def processModels():
    while(True):
        models_lock.acquire()
        if len(models) == 0:
            global numThreads
            numThreads -= 1
            models_lock.release()
            break
        model = models.pop()
        models_lock.release()
        try:
            convertModel(model)
        except:
            print model, "Error model aborted"
            print sys.exc_info()[1]

def loadAll():
    global models
    print "This script will convert grib data in edex to use the new grid format"
    print "You provided no arguments so this will convert almost all data."
    print "To convert only specific models you can cancel and list models as arguments"
    print ""
    for row in queryPostgres("select distinct modelname from grib_models"):
        models.append(row[0])
    print "To save time some grid models will be skipped, these grids will not be"
    print "available until the next model run is ingested. If you would like to convert any"
    print "of these models simply run the conversion script again with a list of models as arguments."
    print ""
    bad = []
    good = []
    for model in models:
        if model.startswith("UnknownModel"):
            bad.append(model)
        else:
            good.append(model)
    if len(bad) > 0:
        print "These Unknown Models will not be converted:",
        for model in bad:
            print "\"" + model + "\"",
        print ""
        print ""
    models = good
    for key in blacklistGrids:
        blacklist = blacklistGrids[key]
        bad = []
        good = []
        for model in models:
            if model in blacklist:
                bad.append(model)
            else:
                good.append(model)
        if len(bad) > 0:
            print "These " + key + " will not be converted:",
            for model in bad:
                print "\"" + model + "\"",
            print ""
            print ""
        models = good
    print "To continue converting the data Press Enter or Ctrl-C to cancel."
    raw_input()

def check_table(tablename):
    rows = queryPostgres("SELECT count(*) FROM information_schema.tables WHERE table_name = '" + tablename + "';")
    if(rows[0][0] != "1"):
        print tablename, "table does not exist, please create tables"
        sys.exit(1)

if __name__ == '__main__':
    t = time()
    check_table("grid")
    check_table("grid_info")
    check_table("parameter")
    if len(sys.argv) == 1:
        loadAll()
    else:
        for i in range(1, len(sys.argv)):
            models.append(sys.argv[i])
    processAllParameters()
    print "Starting %d threads to process models" % (numThreads)
    for i in range(numThreads-1):
        start_new_thread(processModels, ())
    processModels()
    while numThreads > 0:
        sleep(5)
    print "Total Conversion time %ds" % (time() - t)
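Usage follows directly from the __main__ block above: no arguments converts (almost) everything interactively, while naming models converts only those. A sketch; the model names here are examples taken from the skip lists in the script, not a recommendation:

    python convert_grib_data.py                     # interactive, converts all non-skipped models
    python convert_grib_data.py AK-RTMA HI-RTMA     # converts only the listed models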
@ -1,35 +0,0 @@
#!/bin/bash
# This script will copy any grib purge rules to an equivalent grid purge rules file
#
# This update needs to be performed with build ???.

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

IFS=$'\n'
files=`find /awips2/edex/data/utility/common_static/site/*/purge/gribPurgeRules.xml`

if [ $? -ne 0 ]; then
    echo "No site level grib purge files found!"
    exit 0
fi

for f in $files; do
    nf=${f/grib/grid}
    echo Copying $f to $nf
    cp $f $nf
    # level
    sed -n 's/modelInfo\.level\.levelonevalue=/info.level.levelonevalue=/g;p;' -i $nf
    sed -n 's/modelInfo\.level\.leveltwovalue=/info.level.leveltwovalue=/g;p;' -i $nf
    sed -n 's/modelInfo\.level\.masterLevel.name=/info.level.masterLevel.name=/g;p;' -i $nf
    # parameter
    sed -n 's/modelInfo\.parameterAbbreviation=/info.parameter.abbreviation=/g;p;' -i $nf
    # dataset
    sed -n 's/modelInfo\.modelName=/info.datasetId=/g;p;' -i $nf
done

echo "INFO: The update finished successfully."
exit 0
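Each sed expression above renames one grib-era constraint key to its grid equivalent. A one-liner to see a single mapping in isolation - a sketch; the sample value T is made up:

    echo 'modelInfo.parameterAbbreviation=T' | sed 's/modelInfo\.parameterAbbreviation=/info.parameter.abbreviation=/'
    # prints: info.parameter.abbreviation=T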
@ -1,151 +0,0 @@
#!/bin/bash
# This script will create tables for the grid plugin
#
# This needs to be performed with build ????
#

PSQL="/awips2/psql/bin/psql"
GRID_COMMAND="CREATE TABLE grid
(
  id integer NOT NULL,
  forecasttime integer,
  reftime timestamp without time zone,
  utilityflags character varying(255),
  rangeend timestamp without time zone,
  rangestart timestamp without time zone,
  datauri character varying(255),
  inserttime timestamp without time zone,
  info_id integer,
  CONSTRAINT grid_pkey PRIMARY KEY (id),
  CONSTRAINT fk308b46a3c100e9 FOREIGN KEY (info_id)
      REFERENCES grid_info (id) MATCH SIMPLE
      ON UPDATE NO ACTION ON DELETE NO ACTION,
  CONSTRAINT grid_datauri_key UNIQUE (datauri)
)
WITH (OIDS=FALSE);
ALTER TABLE grid OWNER TO awips;

CREATE INDEX \"gridDatasetReftime_idx\"
  ON grid
  USING btree
  (info_id, reftime, forecasttime);

CREATE INDEX gridpersistableplugindataobjectdatauri_idx
  ON grid
  USING btree
  (datauri);

CREATE INDEX gridpersistableplugindataobjectfcsttimeindex
  ON grid
  USING btree
  (forecasttime);

CREATE INDEX gridpersistableplugindataobjectinserttimeindex
  ON grid
  USING btree
  (inserttime);

CREATE INDEX gridpersistableplugindataobjectreftimeindex
  ON grid
  USING btree
  (reftime);
"

INFO_SEQ_COMMAND="CREATE SEQUENCE gridinfo_seq
  INCREMENT 1
  MINVALUE 1
  MAXVALUE 9223372036854775807
  START 91
  CACHE 1;
ALTER TABLE gridinfo_seq OWNER TO awips;"

INFO_COMMAND="CREATE TABLE grid_info
(
  id integer NOT NULL,
  datasetid character varying(255),
  ensembleid character varying(255),
  secondaryid character varying(255),
  level_id bigint,
  location_id integer,
  parameter_abbreviation character varying(255),
  CONSTRAINT grid_info_pkey PRIMARY KEY (id),
  CONSTRAINT fk4c4dae072d36f480 FOREIGN KEY (level_id)
      REFERENCES \"level\" (id) MATCH SIMPLE
      ON UPDATE NO ACTION ON DELETE NO ACTION,
  CONSTRAINT fk4c4dae0746b2bf12 FOREIGN KEY (location_id)
      REFERENCES gridcoverage (id) MATCH SIMPLE
      ON UPDATE NO ACTION ON DELETE NO ACTION,
  CONSTRAINT fk4c4dae076765a9e7 FOREIGN KEY (parameter_abbreviation)
      REFERENCES parameter (abbreviation) MATCH SIMPLE
      ON UPDATE NO ACTION ON DELETE NO ACTION
)
WITH (OIDS=FALSE);
ALTER TABLE grid_info OWNER TO awips;

CREATE INDEX \"gridinfoNameParamLevel_idx\"
  ON grid_info
  USING btree
  (datasetid, parameter_abbreviation, level_id);

CREATE INDEX \"gridinfoSecondryId_idx\"
  ON grid_info
  USING btree
  (secondaryid);"

PARAM_COMMAND="CREATE TABLE parameter
(
  abbreviation character varying(255) NOT NULL,
  \"name\" character varying(255) NOT NULL,
  unit character varying(255),
  CONSTRAINT parameter_pkey PRIMARY KEY (abbreviation)
)
WITH (OIDS=FALSE);
ALTER TABLE parameter OWNER TO awips;"

SQL_COMMAND_REGISTER="insert into plugin_info (name, database, initialized, tablename) VALUES('grid', 'metadata', TRUE, 'grid'), ('parameter', 'metadata', TRUE, 'parameter');"


if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${PARAM_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${INFO_SEQ_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${INFO_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${GRID_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_REGISTER}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0
@ -1,67 +0,0 @@
#!/bin/bash
# This script will register the gridcoverage plugin, which was previously part of grib
#
# This needs to be performed with build ????
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND_CHECK="select * FROM gridcoverage LIMIT 1;"
SQL_COMMAND_REGISTER="insert into plugin_info (name, database, initialized, tablename) VALUES('gridcoverage', 'metadata', TRUE, 'gridcoverage');"
SQL_COMMAND_SEQ="CREATE SEQUENCE gridcoverage_seq INCREMENT 1 MINVALUE 1 MAXVALUE 9223372036854775807 START 1 CACHE 1; ALTER TABLE gridcoverage_seq OWNER TO awips;"
SQL_COMMAND_UPDATE_ID="update gridcoverage set id=nextval('gridcoverage_seq');"
SQL_COMMAND_ALTER_NAME_DESC="ALTER TABLE gridcoverage DROP COLUMN description, ALTER COLUMN name TYPE character varying(255);"
SQL_COMMAND_UPDATE_NAME="update gridcoverage g1 set name = 'Subgrid-' || g2.id from gridcoverage g2 where g1.name like '%-SubGrid-%' and g2.name = split_part(g1.name, '-',1);"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_CHECK}" > /dev/null
if [ $? -ne 0 ]; then
    echo "WARN: gridcoverage table does not exist so we are not registering the plugin"
    exit 0
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_REGISTER}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_SEQ}"
if [ $? -ne 0 ]; then
    echo "FATAL: unable to create gridcoverage_seq"
    exit 1
fi

FK=`${PSQL} -U awips -d metadata -c "\d grib_models" | grep gridcoverage | awk -F"\"" '{print $2}'`
if [ -z "$FK" ]; then
    echo "FATAL: unable to find foreign key constraint on grib_models"
    exit 1
fi

${PSQL} -U awips -d metadata -c "ALTER TABLE grib_models DROP CONSTRAINT ${FK}, ADD CONSTRAINT ${FK} FOREIGN KEY (location_id) REFERENCES gridcoverage (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE NO ACTION;"
if [ $? -ne 0 ]; then
    echo "FATAL: unable to modify foreign key constraint on grib_models"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_UPDATE_ID}"
if [ $? -ne 0 ]; then
    echo "FATAL: unable to update gridcoverage ids"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_ALTER_NAME_DESC}"
if [ $? -ne 0 ]; then
    echo "WARN: unable to remove description column from gridcoverage table"
fi

echo "INFO: The update was successfully applied."
exit 0
@ -1,30 +0,0 @@
#!/bin/bash
# This script will update any D2D bundle files
# to use grid data in place of grib
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host the cave localization files


echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

IFS=$'\n'
files=`find /awips2/edex/data/utility/cave_static/*/*/bundles/ -iname '*.xml'`

if [ $? -ne 0 ]; then
    echo "No bundle files found."
    exit 1
fi

MY_DIR=`dirname $0`

for f in $files; do
    bash $MY_DIR/update_saved_display.sh $f
done

echo "INFO: The update finished successfully."
exit 0
@ -1,27 +0,0 @@
#!/bin/bash
# This script will update any D2D procedures files
# to use grid data in place of grib
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host the cave localization files

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

IFS=$'\n'
files=`ls /awips2/edex/data/utility/cave_static/*/*/procedures/*.xml`

if [ $? -ne 0 ]; then
    echo "No procedures found"
    exit 1
fi

MY_DIR=`dirname $0`

for f in $files; do
    bash $MY_DIR/update_saved_display.sh $f
done

echo "INFO: The update finished successfully."
exit 0
@ -1,33 +0,0 @@
#!/bin/bash
# This script will update any FFMPSourceConfig.xml files
# to use grid data in place of grib
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host FFMPSourceConfig.xml files

echo ""
echo "Press Enter to perform the updates, Ctrl-C to quit."
read done

files=`find /awips2/edex/data/utility/common_static -iname FFMPSourceConfig.xml`

if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

for f in $files; do
    echo Updating $f
    bf=$f.bak.`date +%m%d%y`
    cp $f $bf
    # reconstruct data uris from grib to grid
    awk -F '/' '
    /<dataPath>\/grib/ {print $1 "/grid/" $3 "/" $4 "/.*/.*/.*/" $5 "/" $6 "/" $7 "/" $8 "/" $11 "/" $12; next;}
    {gsub(/<plugin>grib<\/plugin>/,"<plugin>grid</plugin>"); print; }
    ' $bf > $f
done

echo "INFO: The update finished successfully."
exit 0
@ -1,55 +0,0 @@
#!/bin/bash
# This script will update any saved displays from the grib format to the grid format
#
# This update needs to be performed with build ???.
# This update only needs to be run if there are saved displays being stored outside of localization.

if [ $# -eq 0 ]; then
    echo "Please provide a list of saved displays to update."
    exit 1
fi

IFS=$'\n'

for f in "$@"; do
    echo Updating $f
    #bf=$f.bak.`date +%m%d%y`
    #cp $f $bf
    # it's probably not efficient to execute sed 20 times but it's not slow...
    # replace perturbationNumbers with ensemble ids
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2ctl1\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2ctl2\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)3\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2n1\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)4\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2p1\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)5\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2n2\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)6\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2p2\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)7\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2n3\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)8\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2p3\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)9\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2n4\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)10\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2p4\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)11\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2n5\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)12\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2p5\3/g;p;}' -i $f
    # handle grid version
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)gridVersion\("\s*>\s*<\s*constraint\s\+constraintValue="\)\([0-9]\{1,2\}\)\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.secondaryId\2Version\3\4/g;p;}' -i $f
    # level
    sed -n 's/key="modelInfo\.level\.levelonevalue"/key="info.level.levelonevalue"/g;p;' -i $f
    sed -n 's/key="modelInfo\.level\.leveltwovalue"/key="info.level.leveltwovalue"/g;p;' -i $f
    sed -n 's/key="modelInfo\.level\.masterLevel.name"/key="info.level.masterLevel.name"/g;p;' -i $f
    # parameter
    sed -n 's/key="modelInfo\.parameterAbbreviation"/key="info.parameter.abbreviation"/g;p;' -i $f
    # dataset
    sed -n 's/key="modelInfo\.modelName"/key="info.datasetId"/g;p;' -i $f
    # plugin name
    sed -n 's/constraintValue="grib"/constraintValue="grid"/g;p;' -i $f

    # Also need to map the productIdentifierKey in best res resource data.
    sed -n 's/productIdentifierKey="modelInfo\.level\.levelonevalue"/productIdentifierKey="info.level.levelonevalue"/g;p;' -i $f
    sed -n 's/productIdentifierKey="modelInfo\.level\.leveltwovalue"/productIdentifierKey="info.level.leveltwovalue"/g;p;' -i $f
    sed -n 's/productIdentifierKey="modelInfo\.level\.masterLevel.name"/productIdentifierKey="info.level.masterLevel.name"/g;p;' -i $f
    sed -n 's/productIdentifierKey="modelInfo\.modelName"/productIdentifierKey="info.datasetId"/g;p;' -i $f
    sed -n 's/productIdentifierKey="modelInfo\.parameterAbbreviation"/productIdentifierKey="info.parameter.abbreviation"/g;p;' -i $f

    #diff $f $bf > /dev/null
    #if [ $? -eq 0 ]; then rm $bf; echo "No Changes"; fi
done
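
The multi-line sed expressions above slurp the whole file into the hold space (1h;1!H;${...}) so a mapping element that spans several lines still matches. A minimal sketch of the first expression, run on a hypothetical mapping element from stdin instead of in-place, so no file is touched:

  printf '%s\n' '<mapping key="modelInfo.perturbationNumber">' \
      '    <constraint constraintValue="1" constraintType="EQUALS"/>' \
      '</mapping>' | \
  sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)modelInfo\.perturbationNumber\("\s*>\s*<\s*constraint\s\+constraintValue="\)1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1info.ensembleId\2ctl1\3/g;p;}'
  # prints the same element with key="info.ensembleId" and constraintValue="ctl1"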
@ -1,252 +0,0 @@
#!/usr/bin/python
# This script will convert grid data in edex back to the old grib format
#
# This needs to be performed with build ????
# create_grid_tables.sh must be run before running this script.

from shutil import copytree, move, copy
from subprocess import Popen, PIPE
from thread import start_new_thread, allocate_lock
import sys
from os.path import exists, isdir
from os import mkdir
from time import sleep, time
import h5py
import struct

# this is generally a disk bound process so more threads won't help unless the disk is fast
numThreads = 1
# setting too many records at once causes the exec to psql to fail because of the long arg list
maxRecords = 200

postgresCmd = "psql -U awips -d metadata -t -q -A -c "
hdf5loc = "/awips2/edex/data/hdf5/"

quadrantGrids = ["ENSEMBLE37", "ENSEMBLE38", "ENSEMBLE39", "ENSEMBLE40",
                 "ECMF1", "ECMF2", "ECMF3", "ECMF4", "ECMF5", "ECMF6", "ECMF7", "ECMF8",
                 "UKMET40", "UKMET39", "UKMET38", "UKMET37"]

akGrids = ["MOSGuide-AK", "AK-NamDNG5", "AK-RTMA", "AKWAVE239", "AKwave10", "AKwave4", "HiResW-NMM-AK", "HiResW-ARW-AK",
           "ETA242", "mesoEta217", "mesoEta216", "ETA207", "AVN203", "MRF203", "GFS160"]

prGrids = ["HiResW-NMM-SJU", "HiResW-ARW-SJU", "PR-NamDNG5", "PR-RTMA", "MRF205", "GFS161", "mesoEta237"]

hiGrids = ["HI-NamDNG5", "HI-RTMA", "HiResW-NMM-HI", "HiResW-ARW-HI", "MRF204", "AVN225", "GFS254", "SREF243"]

guGrids = ["HiResW-NMM-GU", "HiResW-ARW-GU"]

blacklistGrids = {"quadrant grids which have already been converted in an assembled format": quadrantGrids,
                  "grids over Alaska": akGrids, "grids over Puerto Rico": prGrids,
                  "grids over Hawaii and the Pacific Region": hiGrids, "grids over Guam": guGrids}

parameters = {}
models = []
models_lock = allocate_lock()


def queryPostgres(sql):
    result = Popen(postgresCmd + "\"" + sql + "\"", stdout=PIPE, shell=True)
    retVal = []
    for line in result.stdout:
        retVal.append(line.strip().split("|"))
    return retVal


def convertModel(modelName):
    hdfTime = 0
    totTime = 0
    totTime -= time()
    print modelName, "Loading existing grid_info"
    print modelName, "Querying grib database"
    rows = queryPostgres("select grib.forecasttime, grib.reftime, grib.datauri, gridcoverage.id from grib, grib_models, gridcoverage, level where grib.modelinfo_id = grib_models.id and grib_models.location_id = gridcoverage.id and grib_models.level_id = level.id and grib_models.modelName = '%s' order by grib.forecasttime, grib.reftime, level.masterlevel_name" % modelName)
    print modelName, "Converting %d records" % len(rows)
    gridSql = None
    lastFile = None
    gribFiles = hdf5loc + "grib/" + modelName + "/"
    gridFiles = hdf5loc + "grid/" + modelName + "/"
    if not(isdir(hdf5loc + "grib/")):
        mkdir(hdf5loc + "grib/")
    if not(isdir(gribFiles)):
        mkdir(gribFiles)
    count = 0
    for row in rows:
        gribforecasttime = row[0]
        gribreftime = row[1]
        gribdatauri = row[2]
        gridcoverageid = row[3]
        datauriparts = gribdatauri.split("/")
        datatime = datauriparts[2]
        paramabbrev = datauriparts[4]
        masterlevel = datauriparts[5]
        levelone = datauriparts[6]
        leveltwo = datauriparts[7]
        pert = datauriparts[9]
        version = datauriparts[10]
        secondaryId = "null"
        if version != "0":
            secondaryId = "Version" + version
        ensembleId = convertPert(pert)
        newdatauri = "/grid/" + datatime + "/" + modelName + "/" + secondaryId + "/" + ensembleId + "/" + gridcoverageid + "/" + paramabbrev + "/" + masterlevel + "/" + levelone + "/" + leveltwo
        hdfTime -= time()
        try:
            forecast = int(gribforecasttime)/3600
            prevgrp = gribdatauri
            newgrp = newdatauri
            dataset = "Data"
            if paramabbrev.startswith("static"):
                prevgrp = "/" + gridcoverageid
                newgrp = "/"
                dataset = paramabbrev
            if not(paramabbrev.startswith("static")) or forecast == 0:
                filebase = "/%s-%s-FH-%.3d.h5" % (modelName, gribreftime.split(":")[0].replace(" ", "-"), forecast)
                hdf5file = gribFiles + masterlevel + filebase
                if lastFile != None and lastFile.filename != hdf5file:
                    #print "Closing", lastFile.filename
                    lastFile.close()
                    lastFile = None
                if lastFile == None:
                    if not(exists(hdf5file)):
                        t0 = time()
                        if not(isdir(gribFiles + masterlevel)):
                            mkdir(gribFiles + masterlevel)
                        move(gridFiles + masterlevel + filebase, gribFiles + masterlevel)
                        hdfTime -= (time() - t0)
                    #print "Opening", hdf5file
                    lastFile = h5py.File(hdf5file)
                copyH5(lastFile, newgrp, prevgrp, dataset)
        except:
            print modelName, "Error", gribdatauri
            print sys.exc_info()[1]
            hdfTime += time()
            continue
        hdfTime += time()
        count += 1
        if count % maxRecords == 0:
            print modelName, "Processed %d grid records %d%%" % (maxRecords, 100*count/len(rows))
    totTime += time()
    print modelName, "Time in hdf5 links = %ds" % (hdfTime)
    print modelName, "Total process Time = %ds" % (totTime)


def convertPert(pert):
    if pert == "1":
        return "ctl1"
    elif pert == "2":
        return "ctl2"
    elif pert == "3":
        return "n1"
    elif pert == "4":
        return "p1"
    elif pert == "5":
        return "n2"
    elif pert == "6":
        return "p2"
    elif pert == "7":
        return "n3"
    elif pert == "8":
        return "p3"
    elif pert == "9":
        return "n4"
    elif pert == "10":
        return "p4"
    elif pert == "11":
        return "n5"
    elif pert == "12":
        return "p5"
    return "null"


def copyH5(h5, gribdatauri, griddatauri, dataset="Data"):
    gribgrp = h5['/']
    gridgrp = gribgrp
    for part in gribdatauri.split('/'):
        if part:
            gribgrp = gribgrp[part]
    for part in griddatauri.split('/'):
        if part:
            gridgrp = gridgrp.require_group(part)
    if not(dataset in gridgrp.keys()):
        plists = {'lcpl': gribgrp[dataset]._lcpl, 'lapl': gribgrp[dataset]._lapl}
        plists['lcpl'].set_create_intermediate_group(False)
        h5py.h5o.link(gribgrp[dataset].id, gridgrp.id, dataset, **plists)


def processModels():
    while(True):
        models_lock.acquire()
        if len(models) == 0:
            global numThreads
            numThreads -= 1
            models_lock.release()
            break
        model = models.pop()
        models_lock.release()
        try:
            convertModel(model)
        except:
            print model, "Error model aborted"
            print sys.exc_info()[1]


def loadAll():
    global models
    print "This script will convert grid data in edex to use the old grib format"
    print "You provided no arguments so this will convert almost all data."
    print "To convert only specific models you can cancel and list models as arguments"
    print ""
    for row in queryPostgres("select distinct modelname from grib_models"):
        models.append(row[0])
    print "To save time some grid models will be skipped, these grids will not be"
    print "available until the next model run is ingested. If you would like to convert any"
    print "of these models simply run the conversion script again with a list of models as arguments."
    print ""
    bad = []
    good = []
    for model in models:
        if model.startswith("UnknownModel"):
            bad.append(model)
        else:
            good.append(model)
    if len(bad) > 0:
        print "These Unknown Models will not be converted:",
        for model in bad:
            print "\"" + model + "\"",
        print ""
        print ""
    models = good
    for key in blacklistGrids:
        blacklist = blacklistGrids[key]
        bad = []
        good = []
        for model in models:
            if model in blacklist:
                bad.append(model)
            else:
                good.append(model)
        if len(bad) > 0:
            print "These " + key + " will not be converted:",
            for model in bad:
                print "\"" + model + "\"",
            print ""
            print ""
        models = good
    print "To continue converting the data Press Enter or Ctrl-C to cancel."
    raw_input()


def check_table(tablename):
    rows = queryPostgres("SELECT count(*) FROM information_schema.tables WHERE table_name = '" + tablename + "';")
    if(rows[0][0] != "1"):
        print tablename, "table does not exist, please create tables"
        sys.exit(1)


if __name__ == '__main__':
    t = time()
    check_table("grib")
    if len(sys.argv) == 1:
        loadAll()
    else:
        for i in range(1, len(sys.argv)):
            models.append(sys.argv[i])
    print "Starting %d threads to process models" % (numThreads)
    for i in range(numThreads-1):
        start_new_thread(processModels, ())
    processModels()
    while numThreads > 0:
        sleep(5)
    print "Total Conversion time %ds" % (time() - t)
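
A hedged usage sketch for the converter above (the file name convertGridData.py and the model names GFS212/NAM218 are assumptions; the diff does not show how the script was invoked):

  python convertGridData.py                  # no arguments: prompts, then converts almost all models
  python convertGridData.py GFS212 NAM218    # converts only the listed models, skipping the prompt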
@ -1,24 +0,0 @@
#!/bin/bash
# This script will remove the grid purge rules files that were copied from the grib purge rules
#
# This update needs to be performed with build ???.

echo ""
echo "Press Enter to undo the updates, Ctrl-C to quit."
read done

IFS=$'\n'
files=`find /awips2/edex/data/utility/common_static/site/*/purge/gridPurgeRules.xml`

if [ $? -ne 0 ]; then
    echo "No site level grid purge files found!"
    exit 0
fi

for f in $files; do
    echo Deleting $f
    rm $f
done

echo "INFO: The update was successfully removed."
exit 0
@ -1,58 +0,0 @@
#!/bin/bash
# This script will drop the tables created for the grid plugin
#
# This needs to be performed with build ????
#

PSQL="/awips2/psql/bin/psql"
GRID_COMMAND="DROP TABLE grid;"
INFO_SEQ_COMMAND="DROP SEQUENCE gridinfo_seq;"
INFO_COMMAND="DROP TABLE grid_info;"
PARAM_COMMAND="DROP TABLE parameter;"
SQL_COMMAND_REGISTER="delete from plugin_info where name = 'grid' OR name = 'parameter';"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to undo the updates, Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_REGISTER}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${GRID_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${INFO_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${INFO_SEQ_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${PARAM_COMMAND}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

echo "INFO: The update was successfully applied."

exit 0
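
If you want to confirm the drops took effect, a quick check with the same psql client (not part of the original script):

  /awips2/psql/bin/psql -U awips -d metadata -c "select name from plugin_info where name in ('grid', 'parameter');"   # should return no rows
  /awips2/psql/bin/psql -U awips -d metadata -c "\dt grid*"   # the grid and grid_info tables should be gone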
@ -1,54 +0,0 @@
#!/bin/bash
# This script will unregister the gridcoverage plugin, which was previously part of grib
#
# This needs to be performed with build ????
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND_CHECK="select * FROM gridcoverage LIMIT 1;"
SQL_COMMAND_REGISTER="delete from plugin_info where name = 'gridcoverage';"
SQL_COMMAND_SEQ="DROP SEQUENCE gridcoverage_seq;"
SQL_COMMAND_UPDATE_ID="update gridcoverage set id=id*10000;"
SQL_COMMAND_ALTER_NAME_DESC="ALTER TABLE gridcoverage ADD COLUMN description character varying(3071), ALTER COLUMN name TYPE character varying(2047);"

if [ ! -f ${PSQL} ]; then
    echo "ERROR: The PSQL executable does not exist - ${PSQL}."
    echo "FATAL: Update Failed!"
    exit 1
fi

echo ""
echo "Press Enter to undo the updates, Ctrl-C to quit."
read done

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_CHECK}" > /dev/null
if [ $? -ne 0 ]; then
    echo "WARN: gridcoverage table does not exist so we are not unregistering the plugin"
    exit 0
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_REGISTER}"
if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_SEQ}"
if [ $? -ne 0 ]; then
    echo "FATAL: unable to delete gridcoverage_seq"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_UPDATE_ID}"
if [ $? -ne 0 ]; then
    echo "FATAL: unable to update gridcoverage ids"
    exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_ALTER_NAME_DESC}"
if [ $? -ne 0 ]; then
    echo "WARN: unable to add description column to gridcoverage table"
fi

echo "INFO: The update was successfully removed."
exit 0
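
The id update above multiplies every gridcoverage id by 10000, presumably to move existing rows out of the range a regenerated sequence will hand out. A quick way to inspect the result (not part of the script):

  /awips2/psql/bin/psql -U awips -d metadata -c "select min(id), max(id) from gridcoverage;"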
@ -1,30 +0,0 @@
#!/bin/bash
# This script will update any D2D bundle files
# to use grib data in place of grid
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host the cave localization files

echo ""
echo "Press Enter to undo the updates, Ctrl-C to quit."
read done

IFS=$'\n'
files=`find /awips2/edex/data/utility/cave_static/*/*/bundles/ -iname '*.xml'`

if [ $? -ne 0 ]; then
    echo "No bundle files found."
    exit 1
fi

MY_DIR=`dirname $0`

for f in $files; do
    bash $MY_DIR/update_saved_display.sh $f
done

echo "INFO: The update was successfully removed."
exit 0
@ -1,27 +0,0 @@
#!/bin/bash
# This script will update any D2D procedures files
# to use grib data in place of grid
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host the cave localization files

echo ""
echo "Press Enter to undo the updates, Ctrl-C to quit."
read done

IFS=$'\n'
files=`ls /awips2/edex/data/utility/cave_static/*/*/procedures/*.xml`

if [ $? -ne 0 ]; then
    echo "No procedures found"
    exit 1
fi

MY_DIR=`dirname $0`

for f in $files; do
    bash $MY_DIR/update_saved_display.sh $f
done

echo "INFO: The update was successfully removed."
exit 0
@ -1,33 +0,0 @@
#!/bin/bash
# This script will update any FFMPSourceConfig.xml files
# to use grib data in place of grid
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host FFMPSourceConfig.xml files

echo ""
echo "Press Enter to undo the updates, Ctrl-C to quit."
read done

files=`find /awips2/edex/data/utility/common_static -iname FFMPSourceConfig.xml`

if [ $? -ne 0 ]; then
    echo "FATAL: Update Failed!"
    exit 1
fi

for f in $files; do
    echo Updating $f
    bf=$f.bak.`date +%m%d%y`
    cp $f $bf
    # reconstruct data uris from grid back to grib
    awk -F '/' '
    /<dataPath>\/grid/ {print $1 "/grib/" $3 "/" $4 "/" $8 "/" $9 "/" $10 "/" $11 "/null/null/" $12 "/" $13; next;}
    {gsub(/<plugin>grid<\/plugin>/,"<plugin>grib</plugin>"); print; }
    ' $bf > $f
done

echo "INFO: The update was successfully removed."
exit 0
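
As with the forward conversion, the reverse awk rewrite can be checked on a placeholder line; again a minimal sketch, with the fieldN tokens as hypothetical stand-ins for the real data URI segments:

  echo '<dataPath>/grid/field3/field4/field5/field6/field7/field8/field9/field10/field11/field12/field13' | \
  awk -F '/' '
  /<dataPath>\/grid/ {print $1 "/grib/" $3 "/" $4 "/" $8 "/" $9 "/" $10 "/" $11 "/null/null/" $12 "/" $13; next;}
  {gsub(/<plugin>grid<\/plugin>/,"<plugin>grib</plugin>"); print; }'
  # prints: <dataPath>/grib/field3/field4/field8/field9/field10/field11/null/null/field12/field13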
@ -1,46 +0,0 @@
#!/bin/bash
# This script will update any saved displays from the grid format back to the grib format
#
# This update needs to be performed with build ???.
# This update only needs to be run if there are saved displays being stored outside of localization.

if [ $# -eq 0 ]; then
    echo "Please provide a list of saved displays to update."
    exit 1
fi

IFS=$'\n'

for f in "$@"; do
    echo Updating $f
    #bf=$f.bak.`date +%m%d%y`
    #cp $f $bf
    # it's probably not efficient to execute sed 20 times but it's not slow...
    # replace ensemble ids with perturbationNumbers
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)ctl1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\21\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)ctl2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\22\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\23\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\24\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\25\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\26\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n3\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\27\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p3\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\28\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n4\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\29\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p4\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\210\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n5\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\211\3/g;p;}' -i $f
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p5\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\212\3/g;p;}' -i $f
    # handle grid version
    sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.secondaryId\("\s*>\s*<\s*constraint\s\+constraintValue="\)Version\([0-9]\{1,2\}\)\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1gridVersion\2\3\4/g;p;}' -i $f
    # level
    sed -n 's/key="info\.level\.levelonevalue"/key="modelInfo.level.levelonevalue"/g;p;' -i $f
    sed -n 's/key="info\.level\.leveltwovalue"/key="modelInfo.level.leveltwovalue"/g;p;' -i $f
    sed -n 's/key="info\.level\.masterLevel.name"/key="modelInfo.level.masterLevel.name"/g;p;' -i $f
    # parameter
    sed -n 's/key="info\.parameter.abbreviation"/key="modelInfo.parameterAbbreviation"/g;p;' -i $f
    # dataset
    sed -n 's/key="info\.datasetId"/key="modelInfo.modelName"/g;p;' -i $f
    # plugin name
    sed -n 's/constraintValue="grid"/constraintValue="grib"/g;p;' -i $f
    #diff $f $bf > /dev/null
    #if [ $? -eq 0 ]; then rm $bf; echo "No Changes"; fi
done