Issue #189 grid rollback scripts
Change-Id: I3528a978c649827578ae3488a5e2f67ae075e984 Former-commit-id:106c178ce7
[formerly106c178ce7
[formerly 8dc280e92f0512e05bac6fe01b33a8c21fc6b4f3]] Former-commit-id:b894cabe78
Former-commit-id:66ed210fea
This commit is contained in:
parent
9ba9377361
commit
ca871da8f8
11 changed files with 551 additions and 48 deletions
|
@ -42,6 +42,7 @@ blacklistGrids = {"quadrant grids which have already been converted in an assemb
|
|||
|
||||
parameters = {}
|
||||
models = []
|
||||
gridinfo_seq = []
|
||||
models_lock = allocate_lock()
|
||||
|
||||
|
||||
|
@ -58,7 +59,7 @@ def convertModel(modelName):
|
|||
totTime = 0
|
||||
totTime -= time()
|
||||
print modelName, "Loading existing grid_info"
|
||||
infoSet = loadGridInfo(modelName)
|
||||
infoMap = loadGridInfo(modelName)
|
||||
infoSql = None
|
||||
print modelName, "Querying grib database"
|
||||
rows = queryPostgres("select grib.forecasttime, grib.reftime, grib.utilityflags, grib.rangeend,grib.rangestart, grib.inserttime, grib.datauri, gridcoverage.id, grib_models.level_id, grib_models.location_id from grib, grib_models, gridcoverage where grib.modelinfo_id = grib_models.id and grib_models.location_id = gridcoverage.id and grib_models.modelName = '%s' order by grib.forecasttime, grib.reftime" % modelName)
|
||||
|
@ -118,7 +119,7 @@ def convertModel(modelName):
|
|||
if lastFile == None:
|
||||
if not(exists(hdf5file)):
|
||||
t0 = time()
|
||||
copy(gribFiles+filebase, gridFiles)
|
||||
move(gribFiles+filebase, gridFiles)
|
||||
hdfTime -= (time() - t0)
|
||||
#print "Opening", hdf5file
|
||||
lastFile = h5py.File(hdf5file)
|
||||
|
@ -129,8 +130,11 @@ def convertModel(modelName):
|
|||
hdfTime += time()
|
||||
continue
|
||||
hdfTime += time()
|
||||
infoid = javaGridInfoHashCode(modelName, ensembleId, float(levelone), float(leveltwo), masterlevel, int(gribmodelslocationid), paramabbrev, secondaryId)
|
||||
if not(infoid in infoSet):
|
||||
infokey = modelName + ":::" + secondaryId + ":::" + ensembleId + ":::" + gribmodelslevelid + ":::" + gribmodelslocationid + ":::" + paramabbrev
|
||||
infoid = infoMap.get(infokey)
|
||||
if infoid == None:
|
||||
infoid = nextGridInfoSeq()
|
||||
infoMap[infokey] = infoid
|
||||
if secondaryId == "null":
|
||||
secondaryId = "NULL"
|
||||
else:
|
||||
|
@ -144,7 +148,6 @@ def convertModel(modelName):
|
|||
else:
|
||||
infoSql = infoSql + ", "
|
||||
infoSql = infoSql + ("(%d, '%s', %s, %s, %s, %s, '%s')" % (infoid, modelName, secondaryId, ensembleId, gribmodelslevelid, gribmodelslocationid, paramabbrev))
|
||||
infoSet.add(infoid)
|
||||
if gridSql == None:
|
||||
gridSql = "insert into grid (id, forecasttime, reftime, utilityflags, rangeend, rangestart, datauri, inserttime, info_id) values "
|
||||
else:
|
||||
|
@ -180,10 +183,22 @@ def convertModel(modelName):
|
|||
print modelName, "Total process Time = %ds" % (totTime)
|
||||
|
||||
def loadGridInfo(modelName):
|
||||
infoSet = set()
|
||||
for row in queryPostgres("select distinct id from grid_info where datasetid = '%s'" % (modelName)):
|
||||
infoSet.add(int(row[0]))
|
||||
return infoSet
|
||||
infoMap = {}
|
||||
for row in queryPostgres("select distinct id, datasetid, secondaryid, ensembleid, level_id, location_id, parameter_abbreviation, id from grid_info where datasetid = '%s'" % (modelName)):
|
||||
infokey = row[1] + ":::" + row[2] + ":::" + row[3] + ":::" + row[4] + ":::" + row[5] + ":::" + row[6]
|
||||
infoMap[infokey] = int(row[0])
|
||||
return infoMap
|
||||
|
||||
def nextGridInfoSeq():
    """Return the next unused grid_info id.

    Ids are handed out from a locally cached batch pulled from the
    database sequence; the cache is refilled only when it runs dry.
    The number of ids needed per model varies wildly (1 to ~263), so
    fetching 50 per remaining model usually avoids a second round trip
    to the database -- and an extra trip is cheap if it happens.
    """
    if not gridinfo_seq:
        batch = max(len(models), 1) * 50
        sql = "select nextval('gridinfo_seq') from generate_series(1,%d);" % batch
        gridinfo_seq.extend(int(row[0]) for row in queryPostgres(sql))
    return gridinfo_seq.pop()
|
||||
|
||||
def convertPert(pert):
|
||||
if pert == "1":
|
||||
|
@ -221,46 +236,11 @@ def copyH5(h5, gribdatauri, griddatauri, dataset="Data"):
|
|||
for part in griddatauri.split('/'):
|
||||
if part:
|
||||
gridgrp = gridgrp.require_group(part)
|
||||
if dataset == "Data" or not(dataset in gridgrp.keys()):
|
||||
if not(dataset in gridgrp.keys()):
|
||||
plists = {'lcpl': gribgrp[dataset]._lcpl, 'lapl': gribgrp[dataset]._lapl}
|
||||
plists['lcpl'].set_create_intermediate_group(False)
|
||||
h5py.h5o.link(gribgrp[dataset].id, gridgrp.id, dataset, **plists)
|
||||
|
||||
def pyint2int32(pyint):
    """Truncate a Python int to a signed 32-bit value, as Java int arithmetic would.

    Packs the value as a native 64-bit integer and reinterprets the low
    32 bits as a signed int.
    """
    low32, _high32 = struct.unpack('ii', struct.pack('q', pyint))
    return low32
|
||||
|
||||
def javaDoubleHashCode(d):
    """Replicate java.lang.Double.hashCode().

    Java xors the high and low 32 bits of the IEEE-754 bit pattern;
    here the double is reinterpreted as two signed 32-bit ints and xored.
    """
    low32, high32 = struct.unpack('ii', struct.pack('d', d))
    return low32 ^ high32
|
||||
|
||||
def javaStringHashCode(s):
    """Replicate java.lang.String.hashCode() with 32-bit overflow semantics.

    The literal string "null" stands in for a Java null reference and
    hashes to 0, matching Objects.hashCode(null) on the Java side.
    """
    if s == "null":
        return 0
    h = 0
    for ch in s:
        # h = 31*h + char, truncated to a signed 32-bit int each step
        h = pyint2int32(31 * h + ord(ch))
    return h
|
||||
|
||||
def javaGridInfoHashCode(datasetId, ensembleId, levelOne, levelTwo, masterLevel, locationid, parameterabbreviation, secondaryId):
    """Compute the hash code for a grid info record.

    Per the naming, this appears to mirror the Java-side generated
    hashCode() (each field folded in as h = 31*h + hash(field), truncated
    to a signed 32-bit int) -- TODO confirm against GridInfoRecord.hashCode().
    String arguments equal to "null" hash as a Java null (0).
    """
    # hash of the level: masterLevel name, then the two level double values
    levelhash = 1
    levelhash = pyint2int32(31 * levelhash + (31 + javaStringHashCode(masterLevel)))
    levelhash = pyint2int32(31 * levelhash + javaDoubleHashCode(levelOne))
    levelhash = pyint2int32(31 * levelhash + javaDoubleHashCode(levelTwo))

    # hash of the parameter, looked up in the module-level parameters dict
    # (KeyError here if the abbreviation was never loaded)
    p = parameters[parameterabbreviation]
    parameterhash = 1
    parameterhash = pyint2int32(31*parameterhash + javaStringHashCode(p["abbreviation"]))
    parameterhash = pyint2int32(31*parameterhash + javaStringHashCode(p["name"]))
    parameterhash = pyint2int32(31*parameterhash + javaStringHashCode(p["unit"]))

    # combine fields in a fixed order; changing the order changes the hash
    h = 1
    h = pyint2int32(31*h + javaStringHashCode(datasetId))
    h = pyint2int32(31*h + javaStringHashCode(ensembleId))
    h = pyint2int32(31 * h + levelhash)
    h = pyint2int32(31 * h + locationid)
    h = pyint2int32(31 * h + parameterhash)
    h = pyint2int32(31*h + javaStringHashCode(secondaryId))
    return h
|
||||
|
||||
def processAllParameters():
|
||||
print "Populating parameter table from grib_models"
|
||||
|
|
|
@ -47,19 +47,19 @@ exit 1
|
|||
fi
|
||||
|
||||
${PSQL} -U awips -d metadata -c "ALTER TABLE grib_models DROP CONSTRAINT ${FK}, ADD CONSTRAINT ${FK} FOREIGN KEY (location_id) REFERENCES gridcoverage (id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE NO ACTION;"
|
||||
if [ -z "$FK" ]; then
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "FATAL: unable to modify foreign key constraint on grib_models"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${PSQL} -U awips -d metadata -c "${SQL_COMMAND_UPDATE_ID}"
|
||||
if [ -z "$FK" ]; then
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "FATAL: unable to update gridcoverage ids"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
${PSQL} -U awips -d metadata -c "${SQL_COMMAND_ALTER_NAME_DESC}"
|
||||
if [ -z "$FK" ]; then
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "WARN: unable to remove description column from gridcoverage table"
|
||||
fi
|
||||
|
||||
|
|
252
deltaScripts/unified_grid_rollback/convert_grib_data.py
Normal file
252
deltaScripts/unified_grid_rollback/convert_grib_data.py
Normal file
|
@ -0,0 +1,252 @@
|
|||
#!/usr/bin/python
|
||||
# This script will convert grid data in edex back to the old grib format
|
||||
#
|
||||
# This needs to be performed with build ????
|
||||
# create_grid_tables.sh must be run before running this script.
|
||||
|
||||
from shutil import copytree, move, copy
|
||||
from subprocess import Popen, PIPE
|
||||
from thread import start_new_thread, allocate_lock
|
||||
import sys
|
||||
from os.path import exists, isdir
|
||||
from os import mkdir
|
||||
from time import sleep, time
|
||||
import h5py
|
||||
import struct
|
||||
|
||||
# this is generally a disk bound process so more threads won't help unless the disk is fast
numThreads = 1
# setting too many records at once causes the exec to psql to fail because of the long arg list
maxRecords = 200

# psql invocation: -t/-q/-A give tuple-only, quiet, unaligned (pipe-delimited) output
postgresCmd = "psql -U awips -d metadata -t -q -A -c "
hdf5loc = "/awips2/edex/data/hdf5/"

# Grid families that are skipped by default (see blacklistGrids below).
quadrantGrids = ["ENSEMBLE37", "ENSEMBLE38", "ENSEMBLE39", "ENSEMBLE40",
            "ECMF1", "ECMF2", "ECMF3", "ECMF4", "ECMF5", "ECMF6", "ECMF7", "ECMF8",
            "UKMET40", "UKMET39", "UKMET38", "UKMET37"]

akGrids = ["MOSGuide-AK", "AK-NamDNG5","AK-RTMA", "AKWAVE239", "AKwave10", "AKwave4", "HiResW-NMM-AK", "HiResW-ARW-AK",
           "ETA242", "mesoEta217", "mesoEta216","ETA207", "AVN203", "MRF203", "GFS160"]

prGrids = ["HiResW-NMM-SJU", "HiResW-ARW-SJU", "PR-NamDNG5", "PR-RTMA", "MRF205", "GFS161", "mesoEta237"]

hiGrids = ["HI-NamDNG5", "HI-RTMA", "HiResW-NMM-HI", "HiResW-ARW-HI", "MRF204", "AVN225", "GFS254", "SREF243"]

guGrids = ["HiResW-NMM-GU", "HiResW-ARW-GU"]

# Maps a human-readable reason (printed by loadAll) to the list of model
# names excluded from the default conversion run.
blacklistGrids = {"quadrant grids which have already been converted in an assembled format":quadrantGrids,
                  "grids over Alaska":akGrids, "grids over Puerto Rico":prGrids,
                  "grids over Hawaii and the Pacific Region":hiGrids, "grids over Guam":guGrids}


# NOTE(review): parameters appears unreferenced in this script -- looks like
# a leftover from the forward conversion script; confirm before removing.
parameters = {}
# work queue of model names, consumed by processModels under models_lock
models = []
models_lock = allocate_lock()
|
||||
|
||||
|
||||
def queryPostgres(sql):
    """Run *sql* through psql and return the result rows.

    Each row comes back as a list of column strings (psql's unaligned
    "-A" output is pipe-delimited, one line per row).  The statement is
    interpolated into a shell command line, so it must not contain
    double quotes.
    """
    proc = Popen('%s"%s"' % (postgresCmd, sql), stdout=PIPE, shell=True)
    return [line.strip().split("|") for line in proc.stdout]
|
||||
|
||||
def convertModel(modelName):
    """Roll one model's data back from the grid layout to the grib layout.

    Queries the grib tables for every record of *modelName*, moves the
    model's HDF5 files from hdf5/grid/<model> back to hdf5/grib/<model>,
    and links each record's dataset from its grid-style group into the
    grib-style group (see copyH5).  Progress and timing are printed to
    stdout prefixed with the model name.
    """
    hdfTime = 0
    totTime = 0
    # accumulate wall-clock time by subtracting start and adding end times
    totTime -= time()
    print modelName, "Loading existing grid_info"
    print modelName, "Querying grib database"
    rows = queryPostgres("select grib.forecasttime, grib.reftime, grib.datauri, gridcoverage.id from grib, grib_models, gridcoverage where grib.modelinfo_id = grib_models.id and grib_models.location_id = gridcoverage.id and grib_models.modelName = '%s' order by grib.forecasttime, grib.reftime" % modelName)
    print modelName, "Converting %d records" % len(rows)
    # NOTE(review): gridSql is assigned but never used in this function --
    # looks like a leftover from the forward conversion script.
    gridSql = None
    lastFile = None
    gribFiles = hdf5loc + "grib/" + modelName
    gridFiles = hdf5loc + "grid/" + modelName
    if not(isdir(hdf5loc + "grib/")):
        mkdir(hdf5loc + "grib/")
    if not(isdir(gribFiles)):
        mkdir(gribFiles)
    count = 0;
    for row in rows:
        gribforecasttime = row[0]
        gribreftime = row[1]
        gribdatauri = row[2]
        gridcoverageid = row[3]
        # datauri layout (split on "/"): [2]=datatime, [4]=parameter,
        # [5]=master level, [6]/[7]=level values, [9]=perturbation, [10]=version
        datauriparts = gribdatauri.split("/")
        datatime = datauriparts[2]
        paramabbrev = datauriparts[4]
        masterlevel = datauriparts[5]
        levelone = datauriparts[6]
        leveltwo = datauriparts[7]
        pert = datauriparts[9]
        version = datauriparts[10]
        secondaryId = "null"
        if version != "0":
            secondaryId = "Version" + version
        ensembleId = convertPert(pert)
        # the grid-style datauri this record was stored under
        newdatauri = "/grid/" + datatime + "/" + modelName + "/" + secondaryId + "/" + ensembleId + "/" + gridcoverageid + "/" + paramabbrev + "/" + masterlevel + "/" + levelone + "/" + leveltwo
        # static (topo-like) parameters are only kept for forecast hour 0
        if paramabbrev.startswith("static") and gribforecasttime != 0:
            continue
        hdfTime -= time()
        try:
            forecast = int(gribforecasttime)/3600
            prevgrp = gribdatauri
            newgrp = newdatauri
            dataset="Data"
            if paramabbrev.startswith("static"):
                forecast = 0
                prevgrp = "/"
                # NOTE(review): gridcoveragename is not defined anywhere in
                # this script, so this branch raises NameError, which the
                # bare except below swallows -- static parameters are
                # effectively never linked.  Probably meant gridcoverageid.
                newgrp = "/" + gridcoveragename
                dataset=paramabbrev
            filebase = "/%s-%s-FH-%.3d.h5" % (modelName, gribreftime.split(":")[0].replace(" ", "-"), forecast)
            hdf5file = gribFiles + filebase
            # keep at most one HDF5 file open; rows are ordered by time so
            # records for the same file arrive together
            if lastFile != None and lastFile.filename != hdf5file:
                #print "Closing", lastFile.filename
                lastFile.close()
                lastFile = None
            if lastFile == None:
                if not(exists(hdf5file)):
                    t0 = time()
                    # physically relocate the file from the grid tree to grib
                    move(gridFiles+filebase, gribFiles)
                    # exclude the move time from the hdf5-link timing
                    hdfTime -= (time() - t0)
                #print "Opening", hdf5file
                lastFile = h5py.File(hdf5file)
            # link the dataset from its grid-style group into the grib-style group
            copyH5(lastFile, newgrp, prevgrp, dataset)
        except:
            # best-effort: report the record and keep going
            print modelName, "Error", gribdatauri
            print sys.exc_info()[1]
            hdfTime += time()
            continue
        hdfTime += time()
        count += 1
        if count % maxRecords == 0:
            print modelName, "Processed %d grid records %d%%" % (maxRecords,100*count/len(rows))
    totTime += time()
    print modelName, "Time in hdf5 links = %ds" % (hdfTime)
    print modelName, "Total process Time = %ds" % (totTime)
|
||||
|
||||
def convertPert(pert):
    """Map a grib perturbation number (as a string) to a grid ensemble id.

    Returns "null" for any unrecognized perturbation number.
    """
    pert_to_ensemble = {
        "1": "ctl1", "2": "ctl2",
        "3": "n1",   "4": "p1",
        "5": "n2",   "6": "p2",
        "7": "n3",   "8": "p3",
        "9": "n4",   "10": "p4",
        "11": "n5",  "12": "p5",
    }
    return pert_to_ensemble.get(pert, "null")
|
||||
|
||||
def copyH5(h5, gribdatauri, griddatauri, dataset="Data"):
    """Hard-link *dataset* from one group to another inside the same HDF5 file.

    The group at *gribdatauri* must already exist (plain indexing); the
    group at *griddatauri* is created as needed (require_group).  No data
    is copied -- h5py.h5o.link creates a second link to the same dataset.

    NOTE(review): in this rollback script the caller passes the grid-style
    uri as *gribdatauri* (the link source) and the grib-style uri as
    *griddatauri* (the destination); the parameter names look inherited
    from the forward script -- confirm against the caller in convertModel.
    """
    gribgrp = h5['/']
    gridgrp = gribgrp
    # walk down to the existing source group
    for part in gribdatauri.split('/'):
        if part:
            gribgrp = gribgrp[part]
    # create/walk down to the destination group
    for part in griddatauri.split('/'):
        if part:
            gridgrp = gridgrp.require_group(part)
    if not(dataset in gridgrp.keys()):
        # reuse the source dataset's link-creation/access property lists;
        # intermediate groups were already created above
        plists = {'lcpl': gribgrp[dataset]._lcpl, 'lapl': gribgrp[dataset]._lapl}
        plists['lcpl'].set_create_intermediate_group(False)
        h5py.h5o.link(gribgrp[dataset].id, gridgrp.id, dataset, **plists)
|
||||
|
||||
def processModels():
    """Worker loop: pop model names off the shared queue and convert them.

    Runs until the models list is empty; decrements the global numThreads
    counter (under models_lock) on exit so the main thread knows when all
    workers are done.  A failing model is reported and skipped.
    """
    while(True):
        models_lock.acquire()
        if len(models) == 0:
            # NOTE: `global` mid-function is legal, just unconventional
            global numThreads
            numThreads -= 1
            models_lock.release()
            break
        model = models.pop()
        models_lock.release()
        try:
            convertModel(model)
        except:
            # best-effort: log and move on to the next model
            print model, "Error model aborted"
            print sys.exc_info()[1]
|
||||
|
||||
def loadAll():
    """Populate the models work queue with every model in grib_models.

    Filters out "UnknownModel*" entries and every model found in
    blacklistGrids (printing what is skipped and why), then waits for the
    user to confirm with Enter before the conversion starts.
    """
    global models
    print "This script will convert grid data in edex to use the old grib format"
    print "You provided no arguments so this will convert almost all data."
    print "To convert only specific models you can cancel and list models as arguments"
    print ""
    for row in queryPostgres("select distinct modelname from grib_models"):
        models.append(row[0])
    print "To save time some grid models will be skipped, these grids will not be"
    print "available until the next model run is ingested. If you would like to convert any"
    print "of these models simply run the conversion script again with a list of models as arguments."
    print ""
    # drop models whose name was never resolved
    bad = []
    good = []
    for model in models:
        if model.startswith("UnknownModel"):
            bad.append(model)
        else:
            good.append(model)
    if len(bad) > 0:
        print "These Unknown Models will not be converted:",
        for model in bad:
            print "\"" + model + "\"",
        print ""
        print ""
    models = good
    # apply each blacklist in turn, reporting what it removed
    for key in blacklistGrids:
        blacklist = blacklistGrids[key]
        bad = []
        good = []
        for model in models:
            if model in blacklist:
                bad.append(model)
            else:
                good.append(model)
        if len(bad) > 0:
            print "These " + key + " will not be converted:",
            for model in bad:
                print "\"" + model + "\"",
            print ""
            print ""
        models = good
    print "To continue converting the data Press Enter or Ctrl-C to cancel."
    raw_input()
|
||||
|
||||
def check_table(tablename):
    """Exit the script (status 1) if *tablename* does not exist in the database.

    Relies on psql's unaligned output: a single row containing the string
    "1" means exactly one matching table was found.
    """
    rows = queryPostgres("SELECT count(*) FROM information_schema.tables WHERE table_name = '" + tablename + "';")
    if(rows[0][0] != "1"):
        print tablename, "table does not exist, please create tables"
        sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
    t = time()
    # the grib table must exist before we can roll data back into it
    check_table("grib")
    if len(sys.argv) == 1:
        # no arguments: interactively load (almost) all models
        loadAll()
    else:
        # arguments are explicit model names to convert
        for i in range(1,len(sys.argv)):
            models.append(sys.argv[i])
    print "Starting %d threads to process models" % (numThreads)
    # this thread acts as one worker, so start numThreads-1 extras
    for i in range(numThreads-1):
        start_new_thread(processModels, ())
    processModels()
    # wait for the background workers to drain the queue (each decrements
    # numThreads under models_lock when it finishes)
    while numThreads > 0:
        sleep(5)
    print "Total Conversion time %ds" % (time() - t)
|
24
deltaScripts/unified_grid_rollback/copy_grib_purge_rules.sh
Normal file
24
deltaScripts/unified_grid_rollback/copy_grib_purge_rules.sh
Normal file
|
@ -0,0 +1,24 @@
|
|||
#!/bin/bash
# Rollback companion to the grib->grid purge-rule copy: deletes any
# site-level gridPurgeRules.xml files so the grib purge rules apply again.
# (The original header claimed this copies grib rules to grid files; the
# script actually removes the grid rule files.)
#
# This update needs to be performed with build ???.

echo ""
echo "Press Enter to undo the updates Ctrl-C to quit."
read done

# split the file list on newlines only, so paths with spaces survive
IFS=$'\n'
files=`find /awips2/edex/data/utility/common_static/site/*/purge/gridPurgeRules.xml`

# if the glob matched nothing, find fails on the literal pattern
if [ $? -ne 0 ]; then
   echo "No site level grid purge files found!"
   exit 0
fi

for f in $files; do
   echo "Deleting $f"
   rm "$f"
done

echo "INFO: The update was successfully removed."
exit 0
|
58
deltaScripts/unified_grid_rollback/create_grid_tables.sh
Normal file
58
deltaScripts/unified_grid_rollback/create_grid_tables.sh
Normal file
|
@ -0,0 +1,58 @@
|
|||
#!/bin/bash
# Rollback companion to the grid-table creation script: drops the grid,
# grid_info and parameter tables plus the gridinfo_seq sequence, and
# unregisters the grid and parameter plugins.  (The original header said
# "create tables"; every command here is a DROP/DELETE.)
#
# This needs to be performed with build ????
#

PSQL="/awips2/psql/bin/psql"
GRID_COMMAND="DROP TABLE grid;"
INFO_SEQ_COMMAND="DROP SEQUENCE gridinfo_seq;"
INFO_COMMAND="DROP TABLE grid_info;"
PARAM_COMMAND="DROP TABLE parameter;"
SQL_COMMAND_REGISTER="delete from plugin_info where name = 'grid' OR name = 'parameter';"


if [ ! -f ${PSQL} ]; then
   echo "ERROR: The PSQL executable does not exist - ${PSQL}."
   echo "FATAL: Update Failed!"
   exit 1
fi

echo ""
echo "Press Enter to undo the updates Ctrl-C to quit."
read done

# Run one SQL command against the metadata database; abort on failure.
run_sql() {
   ${PSQL} -U awips -d metadata -c "$1"
   if [ $? -ne 0 ]; then
      echo "FATAL: Update Failed!"
      exit 1
   fi
}

run_sql "${SQL_COMMAND_REGISTER}"
run_sql "${GRID_COMMAND}"
run_sql "${INFO_COMMAND}"
run_sql "${INFO_SEQ_COMMAND}"
run_sql "${PARAM_COMMAND}"

echo "INFO: The update was successfully applied."

exit 0
|
54
deltaScripts/unified_grid_rollback/register_grid_coverage.sh
Normal file
54
deltaScripts/unified_grid_rollback/register_grid_coverage.sh
Normal file
|
@ -0,0 +1,54 @@
|
|||
#!/bin/bash
# This script will register the gridcoverage plugin, which was previously part of grib
#
# This needs to be performed with build ????
#

PSQL="/awips2/psql/bin/psql"
SQL_COMMAND_CHECK="select * FROM gridcoverage LIMIT 1;"
SQL_COMMAND_REGISTER="delete from plugin_info where name = 'gridcoverage';"
SQL_COMMAND_SEQ="DROP SEQUENCE gridcoverage_seq;"
SQL_COMMAND_UPDATE_ID="update gridcoverage set id=id*10000;"
SQL_COMMAND_ALTER_NAME_DESC="ALTER TABLE gridcoverage ADD COLUMN description character varying(3071), ALTER COLUMN name TYPE character varying(2047);"

if [ ! -f ${PSQL} ]; then
   echo "ERROR: The PSQL executable does not exist - ${PSQL}."
   echo "FATAL: Update Failed!"
   exit 1
fi

echo ""
echo "Press Enter to undo the updates Ctrl-C to quit."
read done

# Sanity check: skip everything if the gridcoverage table is missing.
${PSQL} -U awips -d metadata -c "${SQL_COMMAND_CHECK}" > /dev/null
if [ $? -ne 0 ]; then
   echo "WARN: gridcoverage table does not exist so we are not registering the plugin"
   exit 0
fi

# Remove the plugin_info row (presumably so EDEX re-registers the plugin
# on next startup -- confirm against the plugin registration code).
${PSQL} -U awips -d metadata -c "${SQL_COMMAND_REGISTER}"
if [ $? -ne 0 ]; then
   echo "FATAL: Update Failed!"
   exit 1
fi

${PSQL} -U awips -d metadata -c "${SQL_COMMAND_SEQ}"
if [ $? -ne 0 ]; then
   echo "FATAL: unable to delete gridcoverage_seq"
   exit 1
fi

# Shift existing ids out of the way (multiply by 10000).
${PSQL} -U awips -d metadata -c "${SQL_COMMAND_UPDATE_ID}"
if [ $? -ne 0 ]; then
   echo "FATAL: unable to update gridcoverage ids"
   exit 1
fi

# Restore the description column and widen name; failure is non-fatal.
${PSQL} -U awips -d metadata -c "${SQL_COMMAND_ALTER_NAME_DESC}"
if [ $? -ne 0 ]; then
   echo "WARN: unable to add description column to gridcoverage table"
fi

echo "INFO: The update was successfully removed."
exit 0
|
29
deltaScripts/unified_grid_rollback/update_D2D_bundles.sh
Normal file
29
deltaScripts/unified_grid_rollback/update_D2D_bundles.sh
Normal file
|
@ -0,0 +1,29 @@
|
|||
#!/bin/bash
# This script will update any D2D bundle files
# to use grib data in place of grid (reverses the unified-grid update;
# the original header comment had the direction inverted).
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host the cave localization files


echo ""
echo "Press Enter to undo the updates Ctrl-C to quit."
read done

files=`find /awips2/edex/data/utility/cave_static/*/*/bundles/ -iname '*.xml'`

# if the glob matched no bundles directory, find fails on the literal pattern
if [ $? -ne 0 ]; then
   echo "No bundle files found."
   exit 1
fi

MY_DIR=`dirname "$0"`

for f in $files; do
   # quote both paths so spaces in the install path or file names survive
   bash "$MY_DIR/update_saved_display.sh" "$f"
done


echo "INFO: The update was successfully removed."
exit 0
|
26
deltaScripts/unified_grid_rollback/update_D2D_procedures.sh
Normal file
26
deltaScripts/unified_grid_rollback/update_D2D_procedures.sh
Normal file
|
@ -0,0 +1,26 @@
|
|||
#!/bin/bash
# This script will update any D2D procedures files
# to use grib data in place of grid (reverses the unified-grid update;
# the original header comment had the direction inverted).
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host the cave localization files

echo ""
echo "Press Enter to undo the updates Ctrl-C to quit."
read done

files=`ls /awips2/edex/data/utility/cave_static/*/*/procedures/*.xml`

# ls fails when the glob matches nothing
if [ $? -ne 0 ]; then
   echo "No procedures found"
   exit 1
fi

MY_DIR=`dirname "$0"`

for f in $files; do
   # quote both paths so spaces in the install path or file names survive
   bash "$MY_DIR/update_saved_display.sh" "$f"
done

echo "INFO: The update was successfully removed."
exit 0
|
33
deltaScripts/unified_grid_rollback/update_FFMP_Source.sh
Normal file
33
deltaScripts/unified_grid_rollback/update_FFMP_Source.sh
Normal file
|
@ -0,0 +1,33 @@
|
|||
#!/bin/bash
# This script will update any FFMPSourceConfig.xml files
# to use grib data in place of grid (reverses the unified-grid update;
# the awk below rewrites grid datauris/plugin names back to grib).
#
# This update needs to be performed with build ???.
# This update is only for edex servers which host FFMPSourceConfig.xml files

echo ""
echo "Press Enter to undo the updates Ctrl-C to quit."
read done

files=`find /awips2/edex/data/utility/common_static -iname FFMPSourceConfig.xml`

if [ $? -ne 0 ]; then
   echo "FATAL: Update Failed!"
   exit 1
fi

for f in $files; do
   echo Updating $f
   # keep a dated backup and rewrite the original from it
   bf=$f.bak.`date +%m%d%y`
   cp $f $bf
   # reconstruct data uris from grid back to grib: on <dataPath>/grid
   # lines, reassemble the path with the grib plugin name and null
   # secondary/ensemble components; elsewhere just swap the plugin tag
   awk -F '/' '
   /<dataPath>\/grid/ {print $1 "/grib/" $3 "/" $4 "/" $8 "/" $9 "/" $10 "/" $11 "/null/null/" $12 "/" $13; next;}
   {gsub(/<plugin>grid<\/plugin>/,"<plugin>grib</plugin>"); print; }
   ' $bf > $f
done


echo "INFO: The update was successfully removed."
exit 0
|
46
deltaScripts/unified_grid_rollback/update_saved_display.sh
Normal file
46
deltaScripts/unified_grid_rollback/update_saved_display.sh
Normal file
|
@ -0,0 +1,46 @@
|
|||
#!/bin/bash
# This script will update any saved displays from the grid format back to
# the grib format (the original header had the direction inverted: every
# substitution below maps info.* keys to modelInfo.* and grid to grib).
#
# This update needs to be performed with build ???.
# This update only needs to be run if there are saved displays being stored outside of localization.


if [ $# -eq 0 ]; then
   # NOTE(review): warns but does not exit; with no arguments the loop
   # below simply does nothing -- consider adding `exit 1` here.
   echo "Please provide a list of saved displays to update."
fi

IFS=$'\n'

for f in "$@"; do
   echo Updating $f
   #bf=$f.bak.`date +%m%d%y`
   #cp $f $bf
   # its probably not efficient to execute sed 20 times but its not slow...
   # Each multi-line sed reads the whole file into the hold space
   # (1h;1!H;${;g;...}) so the <mapping>...</mapping> element can be
   # matched across line breaks.
   # replace ensemble ids with perturbation numbers (ctl1->1 ... p5->12)
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)ctl1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\21\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)ctl2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\22\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\23\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p1\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\24\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\25\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p2\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\26\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n3\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\27\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p3\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\28\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n4\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\29\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p4\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\210\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)n5\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\211\3/g;p;}' -i $f
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.ensembleId\("\s*>\s*<\s*constraint\s\+constraintValue="\)p5\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1modelInfo.perturbationNumber\212\3/g;p;}' -i $f
   # handle grid version (info.secondaryId "VersionN" -> gridVersion "N")
   sed -n '1h;1!H;${;g;s/\(<mapping\s\+key="\)info\.secondaryId\("\s*>\s*<\s*constraint\s\+constraintValue="\)Version\([0-9]\{1,2\}\)\("\s\+constraintType="EQUALS"\s*\/>\s*<\/mapping>\)/\1gridVersion\2\3\4/g;p;}' -i $f
   # level
   sed -n 's/key="info\.level\.levelonevalue"/key="modelInfo.level.levelonevalue"/g;p;' -i $f
   sed -n 's/key="info\.level\.leveltwovalue"/key="modelInfo.level.leveltwovalue"/g;p;' -i $f
   sed -n 's/key="info\.level\.masterLevel.name"/key="modelInfo.level.masterLevel.name"/g;p;' -i $f
   # parameter
   sed -n 's/key="info\.parameter.abbreviation"/key="modelInfo.parameterAbbreviation"/g;p;' -i $f
   # dataset
   sed -n 's/key="info\.datasetId"/key="modelInfo.modelName"/g;p;' -i $f
   #plugin name
   sed -n 's/constraintValue="grid"/constraintValue="grib"/g;p;' -i $f
   #diff $f $bf > /dev/null
   #if [ $? -eq 0 ]; then rm $bf; echo "No Changes"; fi
done
|
|
@ -103,6 +103,7 @@ public abstract class GridCoverage extends PersistableDataObject implements
|
|||
protected String name;
|
||||
|
||||
/** A description of the grid coverage */
|
||||
@Transient
|
||||
@XmlElement
|
||||
@DynamicSerializeElement
|
||||
protected String description;
|
||||
|
|
Loading…
Add table
Reference in a new issue