Merge remote-tracking branch 'unidata/unidata_18.1.1' into unidata_18.1.1-osx

Michael James 2019-01-10 11:03:47 -07:00
commit fd6f0af211
591 changed files with 3652 additions and 25495 deletions

build/rsync.cave (new file, 45 lines)
@@ -0,0 +1,45 @@
edexOsgi/* cave/* localization/*
javaUtilities/* rpms pythonPackages
*.pdf
../awips2-nativelib/*
../awips2-core/common/*
../awips2-core/edex/*
../awips2-core/features/*
../awips2-core/viz/*
../awips2-core-foss/lib/*
../awips2-foss/lib/*
../awips2-hazards/edex/*
../awips2-hazards/common/*
../awips2-hazards/viz/*
../awips2-ncep/common/*
../awips2-ncep/viz/*
../awips2-ncep/features/*
../awips2-ncep/edex/*
../awips2-goesr/edexOsgi/*
../awips2-goesr/cave/*
../awips2-unidata/*
../python-awips
../awips2-cimss/viz/*
../awips2-cimss/edex/*
../awips2-cimss/features/*
../awips2-cimss/common/*
../awips2-data-delivery/common/*
../awips2-data-delivery/edex/*
../awips2-data-delivery/features/*
../awips2-data-delivery/viz/*
../awips2-drawing/viz/*
../awips2-drawing/features/*
../awips2-gsd/viz/*
../awips2-gsd/features/*
../awips2-nasa/edex/*
../awips2-ogc/foss/*
../awips2-ogc/edex/*
../awips2-ogc/features/*
../awips2-ohd/edex/*
../awips2-ohd/apps/*
../awips2-ohd/features/*
../awips2-ohd/lib/*
../awips2-swpc/common/*
../awips2-swpc/edex/*
../awips2-swpc/viz/*
../awips2-swpc/features/*

@@ -10,10 +10,6 @@ javaUtilities/* rpms pythonPackages
../awips2-foss/lib/*
../awips2-rpm/foss
../awips2-rpm/installers
../awips2-nws/common/*
../awips2-nws/viz/*
../awips2-nws/features/*
../awips2-nws/edex/*
../awips2-hazards/edex/*
../awips2-hazards/common/*
../awips2-hazards/viz/*
@@ -21,6 +17,7 @@ javaUtilities/* rpms pythonPackages
../awips2-ncep/viz/*
../awips2-ncep/features/*
../awips2-ncep/edex/*
../awips2-nws/edex/*
../awips2-goesr/edexOsgi/*
../awips2-goesr/cave/*
../awips2-unidata/*
@@ -29,10 +26,6 @@ javaUtilities/* rpms pythonPackages
../awips2-cimss/edex/*
../awips2-cimss/features/*
../awips2-cimss/common/*
../awips2-collaboration/viz/*
../awips2-collaboration/features/*
../awips2-collaboration/common/*
../awips2-collaboration/foss/*
../awips2-data-delivery/common/*
../awips2-data-delivery/edex/*
../awips2-data-delivery/features/*

@@ -2,7 +2,6 @@ com.raytheon.uf.common.base.feature
com.raytheon.uf.viz.dataplugin.obs.feature
com.raytheon.uf.viz.sounding.feature
com.raytheon.uf.viz.cots.feature
com.raytheon.uf.viz.registry.feature
com.raytheon.uf.viz.common.core.feature
com.raytheon.uf.viz.dataplugins.feature
com.raytheon.viz.feature.awips
@@ -30,6 +29,7 @@ com.raytheon.uf.viz.vtec.feature
com.raytheon.viz.text.feature
com.raytheon.viz.warngen.feature
com.raytheon.uf.viz.d2d.ui.awips.feature
com.raytheon.uf.viz.d2d.gfe.feature
com.raytheon.uf.viz.ncep.dataplugins.feature
com.raytheon.uf.viz.alertview.feature
com.raytheon.viz.satellite.feature
@@ -42,3 +42,6 @@ com.raytheon.uf.viz.ncep.npp.feature
com.raytheon.uf.viz.ncep.perspective.feature
com.raytheon.uf.viz.d2d.skewt.feature
gov.noaa.gsd.viz.ensemble.feature
edu.wisc.ssec.cimss.viz.convectprob.feature
gov.noaa.nws.mdl.viz.boundaryTool.common.feature
com.raytheon.uf.viz.satellite.goesr.feature

@@ -148,9 +148,6 @@
<param name="feature" value="com.raytheon.viz.feature.awips" />
<param name="omit" value="true" />
</antcall>
<!--
we will still build this for the MSFT Windows CAVE.
-->
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.common.base.feature" />
</antcall>
@@ -181,9 +178,6 @@
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.dataplugins.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.dat.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.core.feature" />
</antcall>
@@ -217,15 +211,9 @@
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.core.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.aviation.advisory.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.dataplugins.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.hydro.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.xy.feature" />
</antcall>
@@ -235,9 +223,6 @@
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ncep.nsharp.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.archive.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.alertview.feature" />
</antcall>
@@ -247,12 +232,6 @@
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.thinclient.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.registry.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.datadelivery.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.npp.feature" />
</antcall>
@@ -274,24 +253,12 @@
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.acarssounding.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.gfe.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.gisdatastore.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.ui.awips.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.damagepath.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="edu.wisc.ssec.cimss.viz.convectprob.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.mdl.viz.boundaryTool.common.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.satellite.goesr.feature" />
</antcall>
@@ -299,7 +266,22 @@
<param name="feature" value="gov.noaa.gsd.viz.ensemble.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.mdl.viz.awipsref.feature" />
<param name="feature" value="com.raytheon.uf.viz.vtec.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.dataaccess.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.gfe.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="edu.wisc.ssec.cimss.viz.convectprob.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.mdl.viz.boundaryTool.common.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.gisdatastore.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.obs.viz.geodata.feature" />
@@ -307,20 +289,18 @@
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.ocp.uf.viz.gisdatastore.feature" />
</antcall>
<!--
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.ohd.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.scan.feature" />
<param name="feature" value="gov.noaa.nws.mdl.viz.awipsref.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.server.edex.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.vtec.feature" />
<param name="feature" value="com.raytheon.uf.viz.ohd.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.viz.dataaccess.feature" />
<param name="feature" value="com.raytheon.uf.viz.scan.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="edu.ucar.unidata.uf.viz.feature" />
@@ -340,6 +320,7 @@
<antcall target="p2.build.repo">
<param name="feature" value="gov.noaa.nws.ocp.viz.psh.feature" />
</antcall>
-->
<antcall target="cleanup.features" />
</target>

@@ -1,9 +0,0 @@
#!/bin/sh
export DISPLAY=":0.0"
export FXA_HOME=/awips2/cave/caveEnvironment
export TMCP_HOME=/awips2/cave/caveEnvironment
$FXA_HOME/bin/MonitorTestMode >& /dev/null &

@@ -1,64 +0,0 @@
#!/bin/sh
# determine where the script is being run from.
path_to_script=`readlink -f $0`
RUN_FROM_DIR=`dirname ${path_to_script}`
BASE_ENV_DIR=`dirname ${RUN_FROM_DIR}`
# DR 18113 rehost: the legacy /awips2/fxa/... location is gone.
export TMCP_HOME=/awips2/cave/caveEnvironment
export FXA_HOME=/awips2/cave/caveEnvironment
if [ ! -n "${TMCP_HOME}" ]
then
echo -e "\e[1;31mTMCP_HOME is not set.\e[m"
echo -e "\e[0;32mSetting TMCP_HOME to '${BASE_ENV_DIR}'.\e[m"
export TMCP_HOME=${BASE_ENV_DIR}
else
echo "TMCP_HOME is '${TMCP_HOME}'"
fi
if [ ! -n "${FXA_HOME}" ]
then
echo -e "\e[1;31mFXA_HOME is not set.\e[m"
echo -e "\e[0;32mSetting FXA_HOME to '${BASE_ENV_DIR}'.\e[m"
export FXA_HOME=${BASE_ENV_DIR}
else
echo "FXA_HOME is '${FXA_HOME}'"
fi
# determine if 'FXA_WARNGEN_PRODUCT_ID' needs to be set
HOST=`uname -n`
TMP_HOST_NUMBER=`uname -n | awk '{print substr($1, 3, 1);}'`
ALT_HOST_PART=`uname -n | awk '{print substr($1, 3, length($1) - 1);}'`
ALT_HOST="xt"${ALT_HOST_PART}
ping -c 1 -w 1 ${ALT_HOST} >/dev/null 2>/dev/null
RC=`echo $?`
if [ "${RC}" = "0" ]
then
if [ ! -n "${FXA_WARNGEN_PRODUCT_ID}" ]
then
echo -e "\e[1;31mFXA_WARNGEN_PRODUCT_ID is not set.\e[m"
echo -e "\e[0;32mSetting FXA_WARNGEN_PRODUCT_ID to '${TMP_HOST_NUMBER}'.\e[m"
export FXA_WARNGEN_PRODUCT_ID=${TMP_HOST_NUMBER}
else
echo "FXA_WARNGEN_PRODUCT_ID is '${FXA_WARNGEN_PRODUCT_ID}'."
fi
else
echo -e "\e[1;31mPartner host \""${ALT_HOST}"\" is unreachable by network!\e[m"
echo ${ALT_HOST}
echo
fi
export LD_LIBRARY_PATH=$TMCP_HOME/lib:$LD_LIBRARY_PATH
# for TMCP logs
if [ ! -d $HOME/caveData/tmcpLogs ]; then
mkdir -p $HOME/caveData/tmcpLogs
fi
export LOG_DIR=$HOME/caveData/tmcpLogs
$TMCP_HOME/bin/tmcp

@@ -1,10 +0,0 @@
#!/bin/sh
export DISPLAY=:0.0
export FXA_HOME=/awips2/cave/caveEnvironment
if [ $6 = "kde" ]
then
kstart --alldesktops $FXA_HOME/bin/showBanner $2 $3 $4 $5 &
else
$FXA_HOME/bin/showBanner $2 $3 $4 $5 &
fi

@@ -29,16 +29,4 @@
resourceType="PLAN_VIEW"/>
</extension>
<!-- for making CWAT available on Localization Perspective in CAVE: 2012-05-14 from DHladky -->
<extension
point="com.raytheon.uf.viz.localization.perspective.localizationpath">
<path
application="D2D"
localizationType="common_static"
name="CWAT"
recursive="true"
value="cwat">
</path>
</extension>
</plugin>

@@ -20,16 +20,6 @@
-->
<?eclipse version="3.3"?>
<plugin>
<extension
point="com.raytheon.uf.viz.localization.perspective.localizationpath">
<path
application="D2D"
localizationType="CAVE_STATIC"
name="Cloud Height"
value="cloudheight"
recursive="true">
</path>
</extension>
<extension
point="com.raytheon.viz.ui.displayCustomizer">
<displayCustomizer

@@ -23,15 +23,4 @@
</contextualMenu>
</extension>
<extension
point="com.raytheon.uf.viz.localization.perspective.localizationpath">
<path
application="D2D"
extensionFilter=".xml"
localizationType="COMMON_STATIC"
name="Damage Path"
recursive="false"
value="damagepath">
</path>
</extension>
</plugin>

@@ -99,27 +99,4 @@
sortID="116">
</contextualMenu>
</extension>
<extension
point="com.raytheon.uf.viz.localization.perspective.localizationpath">
<path
application="D2D"
localizationType="common_static"
name="FFMP"
recursive="true"
value="ffmp">
</path>
<path
application="D2D"
localizationType="common_static"
name="Monitoring"
recursive="true"
value="monitoring">
</path>
<path
application="D2D"
localizationType="cave_static"
name="FFMP GUI CONFIG"
value="ffmp/guiConfig">
</path>
</extension>
</plugin>

@@ -27,13 +27,4 @@
type="nucaps">
</verticalSoundingProvider>
</extension>
<extension
point="com.raytheon.uf.viz.localization.perspective.localizationpath">
<path
application="D2D"
localizationType="cave_static"
name="NUCAPS SOUNDING CONFIG"
value="nucaps">
</path>
</extension>
</plugin>

@@ -19,34 +19,11 @@
**/
package com.raytheon.uf.viz.thinclient.ui;
import java.io.IOException;
import org.eclipse.jface.preference.IPersistentPreferenceStore;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import com.raytheon.uf.common.localization.msgs.GetServersResponse;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.viz.core.comm.ConnectivityManager;
import com.raytheon.uf.viz.core.comm.ConnectivityManager.ConnectivityResult;
import com.raytheon.uf.viz.core.comm.IConnectivityCallback;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.localization.ConnectivityPreferenceDialog;
import com.raytheon.uf.viz.core.localization.LocalizationConstants;
import com.raytheon.uf.viz.core.localization.LocalizationManager;
import com.raytheon.uf.viz.core.localization.ServerRemembrance;
import com.raytheon.uf.viz.thinclient.Activator;
import com.raytheon.uf.viz.thinclient.ThinClientUriUtil;
import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants;
/**
@@ -93,7 +70,7 @@ public class ThinClientConnectivityDialog extends ConnectivityPreferenceDialog {
private String proxyAddress;
public ThinClientConnectivityDialog(boolean checkAlertViz) {
super(checkAlertViz, "Unidata AWIPS");
super(checkAlertViz);
IPreferenceStore store = Activator.getDefault().getPreferenceStore();
dataRefreshMethod = store
.getString(ThinClientPreferenceConstants.P_DATA_REFRESH_METHOD);

@@ -71,21 +71,4 @@
<component key="cigvisdist" class="com.raytheon.viz.aviation.CigVisDistComponent"/>
<component key="cigvistrend" class="com.raytheon.viz.aviation.CigVisTrendComponent"/>
</extension>
<extension
point="com.raytheon.uf.viz.localization.perspective.localizationpath">
<path
application="AvnFPS"
localizationType="COMMON_STATIC"
name="Configuration"
value="aviation/config"
recursive="true">
</path>
<path
application="AvnFPS"
localizationType="CAVE_STATIC"
name="Avnwatch"
value="aviation/avnwatch"
recursive="true">
</path>
</extension>
</plugin>

@@ -1,4 +0,0 @@
#!/usr/bin/bash
# This script removes obsolete ISC Write Lock records from the cluster_task table
#
/awips2/psql/bin/psql -U awips -d metadata -c "delete from cluster_task where name = 'ISC Write Lock' and details not like '%:%';"

@@ -1,20 +0,0 @@
#!/usr/bin/env python
# update-state-json-vrh.py - Updates VRH's state.json file prior to installing Qpid SSL certificates onto cpsbn1 and cpsbn2
#
# Modification History
#
# Name Date Comments
# ---------------------------------------------------------------------------
# Qihan Zhang 2017-10-11 DR 20377 - Initial creation
import json
with open('/etc/pki/a2pgca/state/state.json', 'r+') as f:
data = json.load(f)
for target in data['targets']:
if target['name'] == 'cp1f' and target['type'] == 'server':
target['location_specs'] = ['server:$PX_SERVERS:/awips2/qpid']
f.seek(0)
json.dump(data, f, indent=4)
f.truncate()
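# A sketch of the edit this script makes (the "before" value here is a
# hypothetical placeholder; only the replacement list comes from the code above):
#   before: "location_specs": ["server:$CP_SERVERS:/awips2/qpid"]
#   after:  "location_specs": ["server:$PX_SERVERS:/awips2/qpid"]
# seek(0) plus truncate() rewrites the opened file in place.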

@@ -1,10 +0,0 @@
#!/bin/sh
# DR 20377 deltaScript for site VRH since Qpid is running on the PXs there
. /data/fxa/INSTALL/awips2/scripts/.global || exit 1
case "${SITE_IDENTIFIER}" in
${remCPCaseArray} ) dir=$(cd "$(dirname "$0")"; pwd)
"$dir"/update-state-json-vrh.py
a2pgca refresh server:cp1f ;;
esac

@@ -1,8 +0,0 @@
#!/bin/bash
# This script sets the correct rwx permissions for all files on specified NFS
# mounts. See the file /awips2/fxa/bin/set-nfs-perms.sh for details.
# Run this script on dx1 only, as root. This script will ssh from dx1 into
# other boxes as necessary to do work.
bash /awips2/fxa/bin/set-nfs-perms.sh

@@ -1,132 +0,0 @@
#!/bin/bash
# This script updates nfs mounts in /etc/fstab to set noexec, nodev,
# nosuid options as necessary, then remounts all nfs mounts in
# /etc/fstab.
#
# Author: tgurney
if [[ $(id -u) -ne 0 ]]; then
echo $0: Need to be root.
exit 1
fi
fstab_location=/etc/fstab
update_fstab=$(mktemp || exit 1)
cat > $update_fstab << 'EOF'
#!/usr/bin/env python2
import re
import sys
import os.path
FSTAB_PATTERN = r'([^#]\S*)\s+(\S+)\s+(\S+)\s+(\S+)(\s+[0-9]+)?(\s+[0-9]+)?'
MOUNTS = {
'/awips2/edex/data': ['nodev', 'nosuid'],
'/archive': ['nodev', 'noexec', 'nosuid'],
'/awips2/edex/data/fxa/trigger': ['nodev', 'noexec', 'nosuid'],
'/awips2/edex/data/manual': ['nodev', 'noexec', 'nosuid'],
'/awips2/edex/data/share': ['nodev', 'nosuid'],
'/awips2/edex/data/utility': ['nodev', 'noexec', 'nosuid'],
'/awips2/rcm/data/config': ['nodev', 'noexec', 'nosuid'],
'/data/fxa/INSTALL/awips2': ['nodev', 'nosuid'],
'/home': ['nodev', 'nosuid'],
'/awips2/bmh/conf': ['nodev', 'noexec', 'nosuid'],
'/awips2/bmh/data': ['nodev', 'noexec', 'nosuid'],
'/awips2/bmh/neospeech/result': ['nodev', 'noexec', 'nosuid'],
'/nsbn_store': ['nodev', 'noexec', 'nosuid'],
'/data_store': ['nodev', 'noexec', 'nosuid'],
'/awips2/GFESuite': ['nodev', 'nosuid'],
'/awips2/qpid/edexMessageStore': ['nodev', 'noexec', 'nosuid'],
'/awips2/qpid/messageStore': ['nodev', 'noexec', 'nosuid'],
'/tmp/awips2/edex/data': ['nodev', 'noexec', 'nosuid'],
'/tmp/awips2/GFESuite': ['nodev', 'noexec', 'nosuid'],
'/tmp/home': ['nodev', 'noexec', 'nosuid']
}
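# Worked example (hypothetical fstab entry, not taken from a real host): a line
#   nas1:/export/home  /home  nfs  defaults  0 0
# matches FSTAB_PATTERN, and because /home is listed in MOUNTS the loop below
# drops 'defaults' and appends the required options, yielding nodev,nosuid.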
for line in sys.stdin:
line = line.strip()
m = re.match(FSTAB_PATTERN, line)
if not m:
print line
continue
fstab = {'vol': m.group(1), 'mount': m.group(2),
'fs': m.group(3), 'opts': m.group(4).split(','),
'dump': m.group(5) or '0', 'pass': m.group(6) or '0'
}
fstab['mount'] = os.path.abspath(fstab['mount'])
if fstab['fs'] == 'nfs' and fstab['mount'] in MOUNTS:
if 'defaults' in fstab['opts']:
fstab['opts'].remove('defaults')
for opt in MOUNTS[fstab['mount']]:
if opt not in fstab['opts']:
fstab['opts'].append(opt)
fields = (fstab['vol'],
fstab['mount'],
fstab['fs'],
','.join(fstab['opts']),
fstab['dump'],
fstab['pass']
)
print "%s\t%s\t%s\t%s\t%s %s" % fields
EOF
tmp_fstab=$(mktemp || exit 1)
cleanup_exit() {
rm -f $tmp_fstab $update_fstab
exit $1
}
echo INFO: Updating "${fstab_location}"
cat "${fstab_location}" | python2 $update_fstab > $tmp_fstab || cleanup_exit 1
fstab_backup="${fstab_location}.$(date +%Y%m%d.%H%M%S)"
cp "${fstab_location}" $fstab_backup || cleanup_exit 1
echo INFO: Old fstab was saved to $fstab_backup
mv $tmp_fstab "${fstab_location}" || cleanup_exit 1
chmod 644 "${fstab_location}"
for item in $(awk '$3 == "nfs" {print $2}' /etc/mtab); do
for fstab_item in $(grep -Ev '(^#|^\s*$)' "${fstab_location}" | awk '$3 == "nfs" {print $2}'); do
if [[ "$item" == "$fstab_item" ]]; then
if [[ "$item" == /awips2/bmh/neospeech/result* ]]; then
# This particular mount may fail to "mount -o remount" due to strange mount options.
# So we have to unmount and then mount
echo INFO: Unmounting $item
umount $item
echo INFO: Mounting $item
mount $item
else
echo INFO: Remounting $item
mount -o remount $item
fi
fi
done
done
errors=$(mount -fav 2>&1 | grep -v ' : ')
if [[ $? -eq 0 ]]; then
failed_location=/tmp/fstab.$(date +%Y%m%d.%H%M%S).failed
cp "${fstab_location}" $failed_location
echo
echo !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
echo
echo ERROR: fstab failed validation! See below errors.
echo Original "${fstab_location}" has been restored from backup.
echo Failed fstab has been saved to $failed_location
echo
echo $errors
echo
echo !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
cp -v $fstab_backup "${fstab_location}"
cleanup_exit 1
fi
echo INFO: Done.
cleanup_exit 0

@@ -1,4 +0,0 @@
#!/bin/sh
# DR 6081- Enabled certificate-based authentication for qpid.
a2pgca refresh

@@ -1,4 +0,0 @@
#!/bin/sh
# DR 6086 - Update Radar Server / rcm to utilize SSL to connect to Qpid
sed -i "s/:5672'</:5672'\&amp;ssl='true'</g" /awips2/rcm/data/config/persist/config.xml
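# Illustrative check of the substitution above (the sample fragment is
# hypothetical; only the sed expression matches the real delta script):
#   echo "brokerlist='tcp://cp1f:5672'</entry>" | sed "s/:5672'</:5672'\&amp;ssl='true'</g"
# prints: brokerlist='tcp://cp1f:5672'&amp;ssl='true'</entry>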

@@ -1,134 +0,0 @@
#!/awips2/python/bin/python2
##
# DR 6252 - This script will update any found site-level overrides to
# cave_static/base/textws/gui/QualityControlCfg.xml with new configuration
# entries required to support the DSW and SQW WarnGen products.
##
import logging
import glob
import re
import sys
import xml.dom.minidom as minidom
import xml.etree.ElementTree as ET
logging.basicConfig(format='%(asctime)-15s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO)
log = logging.getLogger("updateQualityControlCfg.py")
QC_CONFIG_PATH = "/awips2/edex/data/utility/cave_static/site/*/textws/gui/QualityControlCfg.xml"
def main():
log.info("Starting delta script for DR #6252: updating QualityControlCfg.xml...")
for file in glob.iglob(QC_CONFIG_PATH):
log.info("Updating file [%s]...", file)
tree = ET.parse(file)
root = tree.getroot()
product_map = root.find("productTypeMap")
if product_map is not None:
new_item = ET.SubElement(product_map, "item")
ET.SubElement(new_item, "key").text = "DSW"
ET.SubElement(new_item, "value").text = "Dust Storm Warning"
new_item = ET.SubElement(product_map, "item")
ET.SubElement(new_item, "key").text = "SQW"
ET.SubElement(new_item, "value").text = "Snow Squall Warning"
new_item = ET.SubElement(product_map, "item")
ET.SubElement(new_item, "key").text = "DS.W"
ET.SubElement(new_item, "value").text = "Dust Storm Warning"
new_item = ET.SubElement(product_map, "item")
ET.SubElement(new_item, "key").text = "DS.Y"
ET.SubElement(new_item, "value").text = "Dust Advisory"
new_item = ET.SubElement(product_map, "item")
ET.SubElement(new_item, "key").text = "SQ.W"
ET.SubElement(new_item, "value").text = "Snow Squall Warning"
else:
log.error("Could not find productTypeMap in file [%s].", file)
log.error("Exiting.")
sys.exit(-1)
nnn_ident = root.find("nnnOfIdent")
if nnn_ident is not None:
new_item = ET.SubElement(nnn_ident, "item")
ET.SubElement(new_item, "key").text = "DS.W"
ET.SubElement(new_item, "value").text = "DSW"
new_item = ET.SubElement(nnn_ident, "item")
ET.SubElement(new_item, "key").text = "DS.Y"
ET.SubElement(new_item, "value").text = "DSW"
new_item = ET.SubElement(nnn_ident, "item")
ET.SubElement(new_item, "key").text = "SQ.W"
ET.SubElement(new_item, "value").text = "SQW"
else:
log.error("Could not find nnnOfIdent in file [%s].", file)
log.error("Exiting.")
sys.exit(-1)
followup_nnn = root.find("followupNNN")
if followup_nnn is not None:
new_item = ET.SubElement(followup_nnn, "item")
ET.SubElement(new_item, "key").text = "DS.W"
ET.SubElement(new_item, "value").text = "DSW"
new_item = ET.SubElement(followup_nnn, "item")
ET.SubElement(new_item, "key").text = "DS.Y"
ET.SubElement(new_item, "value").text = "DSW"
new_item = ET.SubElement(followup_nnn, "item")
ET.SubElement(new_item, "key").text = "SQ.W"
ET.SubElement(new_item, "value").text = "SQW"
else:
log.error("Could not find followupNNN in file [%s].", file)
log.error("Exiting.")
sys.exit(-1)
bullet_map = root.find("bulletTypeMap")
if bullet_map is not None:
new_item = ET.SubElement(bullet_map, "item")
ET.SubElement(new_item, "key").text = "DS.Y"
ET.SubElement(new_item, "value").text = "Dust Advisory"
else:
log.error("Could not find bulletTypeMap in file [%s].", file)
log.error("Exiting.")
sys.exit(-1)
# Cleanup junk new-lines and extraneous spaces from output
pretty_xml = minidom.parseString(ET.tostring(root, 'utf-8')).toprettyxml(indent=' '*4, encoding='UTF-8')
pretty_xml = '\n'.join([line for line in pretty_xml.split('\n') if line.strip()])
text_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
pretty_xml = text_re.sub('>\g<1></', pretty_xml)
# because of how the SegmentedNNN tags are defined, if we used ET to
# insert these new entries they would just be placed at the end of the
# file and not with the block of existing entries
try:
index = pretty_xml.rindex('</SegmentedNNN>')
index += len('</SegmentedNNN>') + 1
pretty_xml = pretty_xml[:index] + ' <SegmentedNNN>DSW</SegmentedNNN>\n' +\
' <SegmentedNNN>SQW</SegmentedNNN>\n' + pretty_xml[index:]
except ValueError:
log.error("Could not find SegmentedNNN in file [%s].", file)
log.error("Exiting.")
sys.exit(-1)
with open (file, 'w') as out_file:
out_file.write(pretty_xml)
log.info("Delta script complete.")
if __name__ == '__main__':
main()

@@ -1,34 +0,0 @@
#!/bin/bash
SQL_SCRIPT="createClimateDb.sql"
TABLESPACE_DIR="/awips2/database/tablespaces/climate"
# ensure that the sql script is present
if [ ! -f ${SQL_SCRIPT} ]; then
echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
echo "FATAL: the update has failed!"
exit 1
fi
echo "INFO: update started - creating Climate tablespace directory"
# ensure tablespace directory created
mkdir -p ${TABLESPACE_DIR}
if [ ! -d ${TABLESPACE_DIR} ]; then
echo "ERROR: the required directory - ${TABLESPACE_DIR} was not created."
echo "FATAL: the update has failed!"
exit 1
fi
echo "INFO: creating Climate DB"
# run the update
/awips2/psql/bin/psql -U awipsadmin -d metadata -f ${SQL_SCRIPT}
if [ $? -ne 0 ]; then
echo "FATAL: the update has failed!"
exit 1
fi
echo "INFO: the update has completed successfully!"
exit 0

@@ -1,73 +0,0 @@
/**
* This software was developed and / or modified by NOAA/NWS/OCP/ASDT
*
* Create climate database
*/
\set ON_ERROR_STOP 1
/*
* Connect to an unrelated DB: the new DB does not exist yet, and we cannot be connected to the DB that is being copied.
*/
\connect metadata
/*
* Create tablespace.
*/
CREATE TABLESPACE climate OWNER awipsadmin LOCATION '/awips2/database/tablespaces/climate';
COMMENT ON TABLESPACE climate IS 'Climate Database tablespace';
/*
* Copy hmdb.
*/
CREATE DATABASE climate WITH TEMPLATE hmdb;
/*
* Connect to new DB.
*/
\connect climate
/*
* Create new tables.
*/
CREATE TABLE cpg_session
(
cpg_session_id character varying(50) NOT NULL,
run_type integer NOT NULL,
prod_type integer NOT NULL,
state integer NOT NULL,
status integer,
status_desc character varying(1024),
global_config bytea,
prod_setting bytea,
report_data bytea,
prod_data bytea,
start_at timestamp without time zone NOT NULL,
last_updated timestamp without time zone NOT NULL,
pending_expire timestamp without time zone,
CONSTRAINT cpg_session_pkey PRIMARY KEY (cpg_session_id)
)
WITH (
OIDS=FALSE
);
ALTER TABLE cpg_session
OWNER TO awipsadmin;
GRANT ALL ON TABLE cpg_session TO awipsadmin;
GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE cpg_session TO awips;
GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE cpg_session TO pguser;
CREATE TABLE sent_prod_record
(
prod_id character varying(16) NOT NULL,
period_type character varying(16) NULL,
prod_type character varying(4) NOT NULL,
file_name character varying(50) NULL,
prod_text TEXT NOT NULL,
send_time timestamp without time zone NOT NULL,
user_id character varying(32) NULL,
CONSTRAINT sent_prod_record_pkey PRIMARY KEY (prod_id, send_time)
)
WITH (
OIDS=FALSE
);
ALTER TABLE sent_prod_record
OWNER TO awipsadmin;
GRANT ALL ON TABLE sent_prod_record TO awipsadmin;
GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE sent_prod_record TO awips;
GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE sent_prod_record TO pguser;

@@ -1,38 +0,0 @@
#!/bin/bash
# This searches for RadarUpperText xml files in common_static and updates supplemental
# elevation display tags from sails to productDependentRepElev.
#
# The base files should be updated when upgraded to 18.1.1.
COMMON=/awips2/edex/data/utility/common_static
DIR=`dirname $0`
for dir in `ls ${COMMON}` ; do
if [[ "$dir" != "base" && "$dir" != "configured" && -d "${COMMON}/$dir" ]] ; then
echo "+++ checking $dir +++"
for d in `ls ${COMMON}/$dir/` ; do
pDir="${COMMON}/$dir/$d/styleRules"
if [[ -d "$pDir" ]] ; then
if [[ -f "$pDir/RadarUpperText.xml" ]] ; then
rm -f $pDir/*.tmp
sed -e 's/<sails/<productDependentRepElev/g' $pDir/RadarUpperText.xml > $pDir/RadarUpperText.tmp
cmp -s $pDir/RadarUpperText.xml $pDir/RadarUpperText.tmp
if [[ $? != 0 ]] ; then
rm -f $pDir/RadarUpperText.bak
mv $pDir/RadarUpperText.xml $pDir/RadarUpperText.bak
mv $pDir/RadarUpperText.tmp $pDir/RadarUpperText.xml
chmod 664 $pDir/RadarUpperText.xml
chown awips:fxalpha $pDir/RadarUpperText.xml
echo "converted $pDir/RadarUpperText.xml"
else
echo "No conversion needed for $pDir/RadarUpperText.xml"
rm -f $pDir/RadarUpperText.tmp
fi
fi
fi
done
fi
done

@@ -1,4 +0,0 @@
#!/usr/bin/bash
# This script removes obsolete GfeConfigureTextProducts records from the cluster_task table
#
/awips2/psql/bin/psql -U awips -d metadata -c "delete from cluster_task where name = 'GfeConfigureTextProducts';"

@@ -1,73 +0,0 @@
#!/usr/bin/env python2
# This script will add the new AAG permission to userRoles.xml.
# Modified from DR5816/UpdateGFEPermissions.py
USER_ROLES_PATH = "/awips2/edex/data/utility/common_static/site/*/roles/userRoles.xml"
# New permissions to be added
PERMISSIONS_TO_BE_ADDED = [
"com.raytheon.localization.site/common_static/aviation/aag"
]
import glob
import os
import shutil
import sys
import xml.etree.ElementTree as ET
def main():
for path in glob.iglob(USER_ROLES_PATH):
print "Updating", path
shutil.copyfile(path, path + ".bak")
# Ensure we have an nwsRoleData tree
tree = ET.parse(path)
root = tree.getroot()
if root.tag != "nwsRoleData":
print "ERROR: not a valid userRoles.xml file"
continue
# Ensure the application for this tree is Localization
app = root.find("application")
if app is None or app.text != "Localization":
print "ERROR: not a localization permissions file"
continue
# Add new permissions
for id in PERMISSIONS_TO_BE_ADDED:
# see if permission already present
found = False
for permission in root.iter(tag="permission"):
if permission.attrib["id"] == id:
found = True
break
# if permission not present add it
if not found:
print "Adding new permission %s" % id
ET.SubElement(root, "permission", attrib={"id": id})
else:
print "Permission %s already present" % id
for user in root.iterfind("user"):
userId = user.attrib["userId"]
if userId == "ALL":
for permission in PERMISSIONS_TO_BE_ADDED:
found = False
for userPermission in user.iter(tag="userPermission"):
if userPermission.text == permission:
found = True
break
if not found:
print "Adding permission %s for ALL users" % permission
element = ET.SubElement(user, "userPermission")
element.text = permission
else:
print "Permission %s already present for ALL users" % permission
tree.write(path, encoding="UTF-8", xml_declaration=True)
if __name__ == '__main__':
sys.exit(main())

@@ -1,34 +0,0 @@
#!/bin/bash
# 6183 - Move AvnFPS localization files to common_static.
#
# Author: tgurney
# March 27, 2017
did_work=false
echo INFO: Moving AvnFPS localization files to common_static
for old_location in /awips2/edex/data/utility/cave_static/*/*/aviation; do
if [[ ! -e "${old_location}" ]]; then
continue
fi
new_location=${old_location/cave_static/common_static}
echo INFO: Moving "${old_location}" to "${new_location}"
did_work=true
if [[ ! -d "${new_location}" ]]; then
sudo -u awips mkdir -p -m 750 "${new_location}"
else
echo "WARN: ${new_location} already exists. Just copying newer files"
fi
rsync -aux "${old_location}" "${new_location}/.." &&
rm -rf --one-file-system "${old_location}"
find "${new_location}" -xdev -type f -name '*.md5' -delete
find "${new_location}" -xdev -type f -name '*.pyc' -delete
find "${new_location}" -xdev -type f -name '*.pyo' -delete
echo INFO: Done moving "${old_location}" to "${new_location}"
done
if [[ "${did_work}" == "false" ]]; then
echo INFO: There are no files to move. Did nothing
else
echo INFO: Done moving localization files
fi

@@ -1,5 +0,0 @@
#!/bin/bash
/awips2/psql/bin/psql -U awips -d ncep -c "UPDATE stns.cities SET station_id='UTQIAGVI', name='UTQIAGVIK' WHERE station_number='25711' and name='BARROW';"
/awips2/psql/bin/psql -U awips -d hmdb -c "UPDATE sta_agency_codes SET agency_sta_name='UTQIAGVIK (BARROW)' WHERE station_id=23210 and agency_sta_name='BARROW';"
/awips2/psql/bin/psql -U awips -d hmdb -c "UPDATE station_location SET station_name='UTQIAGVIK (BARROW)' WHERE station_id=23210 and station_name='BARROW';"

@@ -1,421 +0,0 @@
#!/awips2/python/bin/python
#
# This script will grant the new awipsAdmin permission to any user
# having the old awips.user.admin permission.
#
# This will get admins assigned without requiring running a CAVE session
# as user awips or manually editing the users.ini file.
#
# All other users will be granted the default awipsUser role.
#
# This will get most users into the users.ini file so the admin
# can assign permissions without needing them to open a CAVE session
#
import errno
import glob
import os
import pwd
import re
import stat
import sys
import traceback
import xml.etree.ElementTree as ET
SETUP_ENV_PATH = "/awips2/edex/bin/setup.env"
SITEID_PATTERN = r'.*\nexport AW_SITE_IDENTIFIER=(\w+)\W.*'
SITE_ROLES_PATH = "/awips2/edex/data/utility/common_static/site/%s/roles/*.xml"
USERS_INI_PATH = "/awips2/edex/data/utility/common_static/configured/%s/roles/users.ini"
CURRENT_USERS_PATH = "/awips2/edex/data/utility/common_static/site/%s/roles/currentusers.txt"
SPECIAL_USERS = set(["ALL", "awips", "root"])
USERNAME_PATTERN = r'^[a-z_][a-z0-9_]{0,30}$'
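# The finished users.ini is the header below plus one line per user, e.g.
# (hypothetical user names; "password" is a literal placeholder kept for the
# user administration GUI):
#   jsmith = password, awipsUser
#   jadmin = password, awipsAdmin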
USERS_INI_HEADER = """# -----------------------------------------------------------------------------
# This file should not be manually edited.
# Please use the user administration GUI to modify user roles/permissions.
# -----------------------------------------------------------------------------
# [users] section defines users and their (optional) assigned roles
# Users may only be assigned roles, they may not be assigned permissions.
#
# username = password, role1, role2, ..., roleN
# -----------------------------------------------------------------------------
[users]
"""
# The following dicts are for validation of the old nwsroles xml files
USER_PERMISSION = {
"tag": "userPermission",
"text": True,
}
USER_ROLE = {
"tag": "userRole",
"text": True,
}
USER = {
"tag": "user",
"text": False,
"attrib": {
"userId": (True, str),
},
"elements" : [
# (required, multiplicity, schema)
(False, 2, USER_PERMISSION),
(False, 2, USER_ROLE),
]
}
ROLE_DESCRIPTION = {
"tag": "roleDescription",
"text": True,
}
ROLE_PERMISSION = {
"tag": "rolePermission",
"text": True,
}
ROLE = {
"tag": "role",
"text": False,
"attrib": {
"roleId": (True, str)
},
"elements": [
# (required, multiplicity, schema)
(False, 1, ROLE_DESCRIPTION),
(True, 2, ROLE_PERMISSION),
]
}
PERMISSION_DESCRIPTION = {
"tag": "description",
"text": True,
}
PERMISSION = {
"tag": "permission",
"text": False,
"attrib": {
"id": (True, str)
},
"elements": [
# (required, multiplicity, schema)
(False, 1, PERMISSION_DESCRIPTION)
]
}
APPLICATION = {
"tag": "application",
"text": True,
}
NWS_ROLE_DATA = {
"tag": "nwsRoleData",
"text": False,
"elements": [
# (required, multiplicity, schema)
(True, 1, APPLICATION),
(True, 2, PERMISSION),
(False, 2, ROLE),
(False, 2, USER)
]
}
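# For reference, a minimal document that passes validateSchema() against
# NWS_ROLE_DATA would look like this (hypothetical site file content):
#   <nwsRoleData>
#       <application>User Administration</application>
#       <permission id="awips.user.admin"/>
#       <user userId="jdoe">
#           <userPermission>awips.user.admin</userPermission>
#       </user>
#   </nwsRoleData>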
def formatElement(element):
s = "<" + element.tag
if element.attrib:
for id, value in element.items():
s += ' %s="%s"' % (id, value)
s += ">"
return s
def validateSchema(element, schema):
# validates the xml is syntactically correct based on the provided schema
# there is no validation of the content, just the format
valid = True
# validate tag
if element.tag != schema["tag"]:
print " ERROR: Unrecognized element <%s>, skipping..." % element.tag
return False
# validate text ignoring whitespace
text = element.text
if text:
text = text.strip()
if text:
if not schema["text"]:
print 'ERROR: Unexpected text %s found in element <%s>' % (repr(text), element.tag)
valid = False
elif schema["text"]:
print "ERROR: Element <%s> missing text" % element.tag
valid = False
# validate attributes
expectedAttrib = schema.get("attrib", {})
# ensure existing attributes are valid
for key in element.attrib:
if key in expectedAttrib:
value = element.attrib[key]
expectedType = expectedAttrib[key][1]
if type(value) is not expectedType:
print " ERROR: Attribute %s:[%s] of element <%s> is not of expected type %s" % \
(key, str(value), element.tag, str(expectedType))
valid = False
else:
print 'ERROR: Unexpected attribute "%s" found in element<%s>' % (key, element.tag)
valid = False
# ensure required attributes are present
for key in expectedAttrib:
required = expectedAttrib[key][0]
if required and key not in element.attrib:
print 'ERROR: Missing attribute "%s" in element <%s>' % (key, element.tag)
valid = False
# validate child elements
expectedElements = schema.get("elements", [])
# ensure existing child elements are valid
childCount = {}
for child in element:
# find matching child schema
found = False
for required, multiplicity, childSchema in expectedElements:
if child.tag == childSchema["tag"]:
found = True
# update child count
childCount[child.tag] = childCount.get(child.tag, 0) + 1
# validate child element
valid &= validateSchema(child, childSchema)
if not found:
print 'ERROR: Unexpected child element %s found in element %s' % \
(formatElement(child), formatElement(element))
valid = False
# ensure required children were found and multiplicity was valid
for required, multiplicity, childSchema in expectedElements:
count = childCount.get(childSchema["tag"], 0)
if count == 0 and required:
print 'ERROR: Element %s is missing required child element <%s>' % \
(formatElement(element), childSchema["tag"])
valid = False
elif count > 1 and multiplicity == 1:
print 'ERROR: %d <%s> child elements found in element %s where only 1 is allowed' % \
(count, childSchema["tag"], formatElement(element))
valid = False
return valid
def parseRolesPermissions(root):
permissions = {}
roles = {}
users = {}
application = root.find("application").text.strip()
# parse permissions
for permission in root.iterfind("permission"):
id = permission.attrib["id"]
description = permission.find("description")
if description is not None:
description = description.text
if description is not None:
description = description.strip()
permissions[id] = description
# parse roles
for role in root.iterfind("role"):
roleId = role.attrib["roleId"].strip()
roleDescription = role.find("roleDescription")
if roleDescription is not None:
roleDescription = roleDescription.text
if roleDescription is not None:
roleDescription = roleDescription.strip()
rolePermissions = set()
for rolePermission in role.iterfind("rolePermission"):
rolePermissions.add(rolePermission.text.strip())
roles[roleId] = {"roleDescription":roleDescription,
"rolePermissions": rolePermissions
}
# parse users
for user in root.iterfind("user"):
userId = user.attrib["userId"].strip()
userPermissions = set()
for userPermission in user.iterfind("userPermission"):
userPermissions.add(userPermission.text.strip())
userRoles = set()
for userRole in user.iterfind("userRole"):
userRoles.add(userRole.text.strip())
users[userId] = { "userRoles": userRoles,
"userPermissions": userPermissions
}
return application, permissions, roles, users
def main():
userName = pwd.getpwuid(os.getuid()).pw_name
if userName not in ['awips', 'root']:
print "ERROR: This script must be run as user root or awips"
return 1
# parse site identifier from setup.env
siteId = None
try:
with open(SETUP_ENV_PATH, "r") as env:
contents = env.read()
m = re.match(SITEID_PATTERN, contents)
if m is not None:
siteId = m.group(1)
except:
print "ERROR: Unable to read", SETUP_ENV_PATH, "exiting"
traceback.print_exc()
return 1
if siteId is None:
print "ERROR: AW_SITE_IDENTIFIER not found in", SETUP_ENV_PATH
return 1
# if users.ini already exists just exit
iniPath = USERS_INI_PATH % siteId
if os.path.exists(iniPath):
print "WARN:", iniPath, "already exists, exiting."
return 1
# loop over all user roles xml files looking for users and/or admins
allUsers = set()
admins = set()
paths = glob.glob(SITE_ROLES_PATH % siteId)
for path in paths:
print "INFO: Processing file:", path
try:
tree = ET.parse(path)
root = tree.getroot()
except:
print "ERROR: Unable to parse XML file: %s" % path
traceback.print_exc()
continue
# ensure file contains valid XML
if not validateSchema(root, NWS_ROLE_DATA):
print "ERROR:", path, "does not contain valid nwsRoleData xml, skipping"
continue
# parse out roles and permissions into python dicts
application, permissions, roles, users = parseRolesPermissions(root)
for user in users:
allUsers.add(user)
if application == "User Administration" and \
"awips.user.admin" in users[user]["userPermissions"]:
admins.add(user)
# set allUsers to the content of
# /awips2/edex/data/utility/common_static/site/XXX/roles/currentusers.txt
# if it exists
currentUsersPath = CURRENT_USERS_PATH % siteId
currentUsers = None
try:
with open(currentUsersPath, 'r') as f:
currentUsers = f.readlines()
currentUsers = [x.strip() for x in currentUsers]
except IOError as e:
if e.errno == errno.ENOENT:
print "WARN: %s file not found,\n using list of users for existing roles files" % currentUsersPath
elif e.errno == errno.EACCES:
print "ERROR: Unable to read %s,\n correct file permissions and re-run this script" % currentUsersPath
return 1
else:
print "ERROR: Error reading %s,\n fix the file and re-run this script" % currentUsersPath
traceback.print_exc()
return 1
if currentUsers:
# remove None or empty strings
currentUsers = filter(None, currentUsers)
# validate user names
for user in currentUsers:
if not re.match(USERNAME_PATTERN, user):
print "ERROR: %s\n contains an invalid username: '%s'\n correct and re-run this script" % (currentUsersPath, user)
return 1
allUsers = set(currentUsers)
# remove special users
allUsers -= SPECIAL_USERS
# remove admins that are not in allUsers
admins &= allUsers
# convert allUsers set to a sorted list
# This just makes the file easier for a human
# to look at after running the delta script.
# The GUI will always sort the user names
allUsers = sorted(allUsers)
# output users.ini file
try:
dirPath = os.path.dirname(iniPath)
try:
os.makedirs(dirPath, 0750)
except OSError, e:
if e.errno != errno.EEXIST:
raise
with open(iniPath, 'w') as out:
out.write(USERS_INI_HEADER)
for user in allUsers:
role = "awipsUser"
if user in admins:
role = "awipsAdmin"
print "INFO: Granting", user, role, "role"
out.write("%s = password, %s\n" % (user, role))
os.chmod(iniPath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP)
# change owner to awips
p = pwd.getpwnam("awips")
os.chown(dirPath, p.pw_uid, p.pw_gid)
os.chown(iniPath, p.pw_uid, p.pw_gid)
except:
print "ERROR: Exception writing to %s" % iniPath
traceback.print_exc()
# remove any partially written users.ini file
if os.path.isfile(iniPath):
os.remove(iniPath)
return 1
print "INFO: Successfully migrated awips admins"
return 0
if __name__ == '__main__':
sys.exit(main())

@@ -1,51 +0,0 @@
#!/awips2/python/bin/python
# Removes "currentAnimationMode" elements and attributes from bundle xml files
# Author: tgurney
from __future__ import print_function
import os
import shutil
import sys
import subprocess
import traceback
import xml.etree.ElementTree as ET
def main():
print("INFO: Starting update of bundle XMLs")
paths = subprocess.check_output("find /awips2/edex/data/utility/*/*/*/bundles -type f -regex '.*\\.xml'", shell=True)
paths = paths.strip().split('\n')
for path in paths:
tree = ET.parse(path)
try:
node = tree.getroot().find('displayList').find('displays').find('descriptor')
except AttributeError as a:
# one of the elements was not found
continue
elementToRemove = node.find('currentAnimationMode')
if elementToRemove is not None or 'currentAnimationMode' in node.attrib:
try:
shutil.copyfile(path, path + ".bak")
print("INFO: Updating " + path)
node.attrib.pop('currentAnimationMode', None)
if elementToRemove is not None:
node.remove(elementToRemove)
try:
tree.write(path, encoding="UTF-8", xml_declaration=True)
except Exception:
traceback.print_exc()
print("INFO: Restoring " + path + " from backup")
shutil.copyfile(path + ".bak", path)
try:
os.remove(path + ".bak")
except Exception:
pass
except Exception:
traceback.print_exc()
continue
print("INFO: Done.")
if __name__ == '__main__':
sys.exit(main())

@@ -1,290 +0,0 @@
#!/awips2/python/bin/python2
##
# DR 6346 - This script will compare the site overrides for AreaDictionary.py
# and CityLocation.py to the CONFIGURED level versions and create incremental
# overrides of these files.
##
import copy
import logging
import glob
import imp
import os
import os.path
import pprint
import shutil
logging.basicConfig(format='%(asctime)-15s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.DEBUG)
log = logging.getLogger("createIncrementalAreaDictionaryAndCityLocation.py")
TEXT_UTILS_BASE_PATH = "/awips2/edex/data/utility/cave_static/site/*/gfe/userPython/textUtilities/regular/"
AREA_DICT_GLOB_PATH = os.path.join(TEXT_UTILS_BASE_PATH, "AreaDictionary.py")
CITY_LOC_GLOB_PATH = os.path.join(TEXT_UTILS_BASE_PATH, "CityLocation.py")
AREA_DICT_HEADER = """
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# AreaDictionary
# AreaDictionary file
#
# Author: GFE Installation Script
# ----------------------------------------------------------------------------
##
# This is an absolute override file, indicating that a higher priority version
# of the file will completely replace a lower priority version of the file.
##
# Format:
# AreaDictionary = {
# "editArea" : {
# "ugcCode": "STZxxx",
# "ugcName": "EditAreaName",
# "ugcCities": ['City1', 'City2'],
# "ugcTimeZone": "MST7MDT",
# "fullStateName": "COLORADO",
# "partOfState": "NORTHEAST",
# "stateAbbr": "CO",
# "independentCity": 0,
# "locationName": "GeneralAreaName",
# }
# ...
# }
# ugcTimeZone: This field should be replaced with the correct time zone
# for that zone. If it is different from the time zone of
# the local WFO, its time zone will appear in the header of
# some products in parentheses.
# Using any other strings to define
# the time zone may produce undesirable results.
# The time zone may also be a list of time zones in case
# a forecast zone happens to cover an area that contains
# two time zones.
# e.g. "ugcTimeZone" : ["MST7MDT", "PST8PDT"]
#
# ugcCode: This field contains the ugc coding for this area, such as COZ023
#
# ugcName: This field contains the descriptive name for this area. It
# is used in various products, including Hazard products. This is
# the official county or zone name.
#
# locationName: This field is optional, but provides an alternate name that
# is used in the text of some products to describe the area. The
# FFA product uses this value if available.
#
# ugcCities: This field contains the list of cities for hazard and routine
# products.
#
# fullStateName: This field is used in hazard products to fully describe
# the state in which this edit area resides.
#
# partOfState: This field describes the location within a state (such as
# NORTHEAST) for this area. It is used in hazard products.
#
# stateAbbr: State Abbreviation for the fullStateName.
#
# independentCity: Set to 0 or 1. Some counties (FIPS coding) are actually
# cities. Setting the flag to 1 will instruct those formatters
# to mention independent cities, rather than include this "county"
# in the county list of the product.
#
# wfo: The wfo(s) with responsibility for the area
#
#
from DefaultAreaDictionary import AreaDictionary
"""
CITY_LOCATION_HEADER = """
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# CityLocation
# CityLocation file
#
# Author: GFE Installation Script
# ----------------------------------------------------------------------------
##
# This is an absolute override file, indicating that a higher priority version
# of the file will completely replace a lower priority version of the file.
##
# Format:
# CityLocation = {
# "editArea": {'cityName1' : (latitude, longitude),
# 'cityName2' : (latitude, longitude),
# ...
# }
# ...
# }
#
# editArea: name of edit area as in AreaDictionary
#
# cityName: name of the city - should be the same as in AreaDictionary.
#
# latitude/longitude: city's lat/lon location.
#
from DefaultCityLocation import CityLocation
"""
def create_incremental_area_dictionary():
for site_file in glob.iglob(AREA_DICT_GLOB_PATH):
log.info("Generating incremental override file [%s]...", site_file)
base_file = site_file.replace("site", "configured", 1)
if not os.path.isfile(base_file):
log.error("Could not find CONFIGURED level file [%s].", base_file)
log.error("Skipping to next file.")
continue
with open(site_file, 'r') as f:
contents = f.read()
if "from DefaultAreaDictionary import AreaDictionary" in contents:
log.info("Site AreaDictionary file [%s] has already been upgraded.", site_file)
continue
log.info("Using configured file [%s]...", base_file)
log.info("Using site file [%s]...", site_file)
configured_module = imp.load_source('base', base_file)
site_module = imp.load_source('override', site_file)
configured_dict = configured_module.AreaDictionary
site_dict = site_module.AreaDictionary
diffs = diff_dicts(configured_dict, site_dict)
log.debug("AreaDictionary Differences: %r", diffs)
write_override_file(site_file, 'AreaDictionary', diffs, AREA_DICT_HEADER)
delete_files(base_file + '*')
def create_incremental_city_location():
for site_file in glob.iglob(CITY_LOC_GLOB_PATH):
log.info("Generating incremental override file [%s]...", site_file)
base_file = site_file.replace("site", "configured", 1)
if not os.path.isfile(base_file):
log.error("Could not find CONFIGURED level file [%s].", base_file)
log.error("Skipping to next file.")
continue
with open(site_file, 'r') as f:
contents = f.read()
if "from DefaultCityLocation import CityLocation" in contents:
log.info("Site CityLocation file [%s] has already been upgraded.", site_file)
continue
log.info("Using configured file [%s]...", base_file)
log.info("Using site file [%s]...", site_file)
configured_module = imp.load_source('base', base_file)
site_module = imp.load_source('override', site_file)
configured_dict = configured_module.CityLocation
site_dict = site_module.CityLocation
diffs = diff_dicts(configured_dict, site_dict)
log.debug("CityLocation Differences: %r", diffs)
write_override_file(site_file, 'CityLocation', diffs, CITY_LOCATION_HEADER)
delete_files(base_file + '*')
def diff_dicts(base, override):
differences = []
keys = set().union(base.keys(), override.keys())
# log.debug("Combined keys: %s", keys)
for key in sorted(keys):
if key not in base:
log.debug("Key [%s] in override, but not base.", key)
differences.append((key, copy.copy(override[key]), True))
elif key not in override:
log.debug("Key [%s] in base, but not override.", key)
else:
sub_diffs = sub_diff_dicts(base[key], override[key])
if sub_diffs:
log.debug("Differences for key [%s]: %r", key, sub_diffs)
differences.append((key, sub_diffs, False))
return differences
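# Worked example (hypothetical data): given
#   base     = {"COZ023": {"ugcName": "Old"}}
#   override = {"COZ023": {"ugcName": "New"}, "COZ024": {"ugcName": "Extra"}}
# diff_dicts returns
#   [("COZ023", {"ugcName": "New"}, False), ("COZ024", {"ugcName": "Extra"}, True)]
# i.e. (key, changes, added) tuples as consumed by write_override_file().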
def sub_diff_dicts(base, override, level=0):
differences = {}
keys = set().union(base.keys(), override.keys())
# log.debug("Combined keys: %s", keys)
for key in sorted(keys):
if key not in base:
log.debug("Key [%s] in override, but not base.", key)
differences[key] = copy.copy(override[key])
elif key not in override:
log.debug("Key [%s] in base, but not override.", key)
else:
if base[key] != override[key]:
differences[key] = override[key]
return differences
def write_override_file(file_name, object_name, object_value, header):
backup_file = file_name + ".bak.dr_6346"
log.info("Writing backup file [%s]", backup_file)
try:
shutil.copy(file_name, backup_file)
except:
log.exception("Unable to write backup file [%s]", backup_file)
log.error("Skipping file [%s]", file_name)
return
log.info("Writing override file [%s]", file_name)
try:
with open(file_name, 'w') as out_file:
printer = pprint.PrettyPrinter()
out_file.write(header)
for (key, value, added) in sorted(object_value, key=lambda i: i[0]):
if added:
out_file.write("{}[{!r}] = {}".format(object_name, key, printer.pformat(value)))
out_file.write('\n')
else:
for sub_key in sorted(value.keys()):
out_file.write("{}[{!r}][{!r}] = {}".format(object_name, key, sub_key, printer.pformat(value[sub_key])))
out_file.write('\n')
out_file.write('\n')
except:
log.exception("Unable to write incremental override file [%s]", file_name)
log.critical("Restore backup file [%s] to [%s] before restarting EDEX.", backup_file, file_name)
def delete_files(file_pattern):
for f in glob.iglob(file_pattern):
try:
os.remove(f)
except:
log.exception("Unable to delete file [%s].", f)
def main():
log.info("Starting delta script for DR #6346: creating incremental overrides for AreaDictionary.py and CityLocation.py...")
create_incremental_area_dictionary()
create_incremental_city_location()
log.info("Delta script complete.")
if __name__ == '__main__':
main()

@@ -1,50 +0,0 @@
#!/bin/bash
# This script adds two columns 'minVersionRequired' and 'maxVersionRequired' to
# the backup_job table, with the current EDEX version as the default value for
# each
version=$(rpm -q awips2-database --qf %{VERSION})
if [[ $? -ne 0 ]]; then
echo ERROR: Failed to get EDEX version. Cannot continue
exit 1
fi
has_column() {
table_name=${1}
column_name=${2}
result=$(psql --user=awipsadmin --db=metadata -Aqtc "
select 1
from information_schema.columns c
where c.table_name = '${table_name}'
and c.column_name='${column_name}';")
[[ ${result} == "1" ]]
return $?
}
has_column backup_job minversionrequired
if [[ $? -ne 0 ]]; then
echo INFO: Adding column minversionrequired
psql --user=awipsadmin --db=metadata -1 << EOF
\set ON_ERROR_STOP on
alter table backup_job add column minVersionRequired varchar(16);
update backup_job set minVersionRequired = '${version}' where minVersionRequired is null;
alter table backup_job alter column minVersionRequired set not null;
EOF
else
echo INFO: minversionrequired column already exists.
fi
has_column backup_job maxversionrequired
if [[ $? -ne 0 ]]; then
echo INFO: Adding column maxversionrequired
psql --user=awipsadmin --db=metadata -1 << EOF
\set ON_ERROR_STOP on
alter table backup_job add column maxVersionRequired varchar(16);
update backup_job set maxVersionRequired = '${version}' where maxVersionRequired is null;
alter table backup_job alter column maxVersionRequired set not null;
EOF
else
echo INFO: maxversionrequired column already exists.
fi
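# Optional verification (illustrative, not part of the delta script): list the
# two columns after running:
#   psql --user=awipsadmin --db=metadata -Aqtc "select column_name from information_schema.columns where table_name = 'backup_job' and column_name like '%versionrequired';"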

@@ -1,22 +0,0 @@
#!/bin/bash
#
# DR 6355
# relocate volumebrowser/LevelMappingFile.xml into level/mappings/
#
BASE='/awips2/edex/data/utility/'
DEST='level/mappings/'
for file in `find $BASE -wholename \*/volumebrowser/LevelMappingFile.xml`;
do
dir=`echo $file | sed 's/\/volumebrowser\/LevelMappingFile.xml//g'`;
destDir=${dir}/${DEST}
if [ ! -d "${destDir}" ];
then
sudo -u awips mkdir -p ${destDir};
fi
echo "Moving ${file} to ${destDir}";
mv $file $destDir;
done;
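# Worked example (hypothetical site path): for
#   /awips2/edex/data/utility/cave_static/site/OAX/volumebrowser/LevelMappingFile.xml
# the sed strips the trailing /volumebrowser/LevelMappingFile.xml, so the file
# is moved to /awips2/edex/data/utility/cave_static/site/OAX/level/mappings/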

@@ -1,45 +0,0 @@
#!/bin/bash
# DR #6362 - This script creates the alaska_marine view

SIMPLEVS=("0.064" "0.016" "0.004" "0.001")

#
# Ensure simplification levels exist for both marinezones and offshore
#
for LEV in "${SIMPLEVS[@]}" ; do
    echo " Creating simplified geometry level $LEV ..."
    SUFFIX=${LEV/./_}
    /awips2/psql/bin/psql -d maps -U awipsadmin -q -c "
    DO \$\$
    BEGIN
        BEGIN
            PERFORM AddGeometryColumn('mapdata','marinezones','the_geom_${SUFFIX}','4326',(SELECT type FROM public.geometry_columns WHERE f_table_schema='mapdata' and f_table_name='marinezones' and f_geometry_column='the_geom'),2);
            UPDATE mapdata.marinezones SET the_geom_${SUFFIX}=ST_Segmentize(ST_Multi(ST_SimplifyPreserveTopology(the_geom,${LEV})),0.1);
            CREATE INDEX marinezones_the_geom_${SUFFIX}_gist ON mapdata.marinezones USING gist(the_geom_${SUFFIX});
        EXCEPTION
            WHEN duplicate_column THEN
                NULL;  -- this simplification level already exists; skip it
        END;
        BEGIN
            PERFORM AddGeometryColumn('mapdata','offshore','the_geom_${SUFFIX}','4326',(SELECT type FROM public.geometry_columns WHERE f_table_schema='mapdata' and f_table_name='offshore' and f_geometry_column='the_geom'),2);
            UPDATE mapdata.offshore SET the_geom_${SUFFIX}=ST_Segmentize(ST_Multi(ST_SimplifyPreserveTopology(the_geom,${LEV})),0.1);
            CREATE INDEX offshore_the_geom_${SUFFIX}_gist ON mapdata.offshore USING gist(the_geom_${SUFFIX});
        EXCEPTION
            WHEN duplicate_column THEN
                NULL;  -- this simplification level already exists; skip it
        END;
    END;
    \$\$"
done

#
# Create the alaska_marine view
#
/awips2/psql/bin/psql -d maps -U awipsadmin -q -c "
DROP VIEW IF EXISTS mapdata.alaska_marine;
CREATE OR REPLACE VIEW mapdata.alaska_marine AS
    SELECT CAST(ROW_NUMBER() OVER(ORDER BY id) AS INT) GID, * FROM (
        SELECT id, wfo, name, lat, lon,
            the_geom, the_geom_0, the_geom_0_064, the_geom_0_016, the_geom_0_004, the_geom_0_001
        FROM mapdata.marinezones WHERE wfo LIKE '%AFC%' or wfo LIKE '%AFG%' or wfo LIKE '%AJK%'
        UNION
        SELECT id, wfo, name, lat, lon,
            the_geom, the_geom_0, the_geom_0_064, the_geom_0_016, the_geom_0_004, the_geom_0_001
        FROM mapdata.offshore WHERE wfo LIKE '%AFC%' or wfo LIKE '%AFG%' or wfo LIKE '%AJK%'
    ) a;
"


@ -1,90 +0,0 @@
#!/bin/bash
# 6372 - Move AbstractMenuUtil menu localization files to common_static.
#
# Author: mapeters
# Aug 21, 2017

# files under menus/ dir that are handled by AbstractMenuUtil
declare -a menu_files=("ffmp/ffmp.xml"
    "scan/scan.xml"
    "fog/baseFog.xml"
    "safeseas/baseSafeSeas.xml"
    "snow/baseSnow.xml"
    "satellite/baseDerivedProductsImagery.xml"
    "satellite/baseComposite.xml"
    "upperair/baseRAOB.xml"
    "upperair/uaMenus.xml"
    "upperair/.upperairMenuTime"
    "radar/index.xml"
    "radar/dialRadars.xml"
    "radar/airportRadars.xml"
    "radar/radarindex.xml"
    "radar/.radarMenuTime"
    "ncepHydro/cpc/cpcMenus.xml"
    "ncepHydro/spc/hazardMenus.xml"
    "ncepHydro/hpc/hpcMenus.xml"
    "ncepHydro/mpc/mpcMenus.xml"
    "ncepHydro/nco/ncoMenus.xml"
    "ncepHydro/cpc/.ncepHydro/cpcMenuTime"
    "ncepHydro/spc/.ncepHydro/spcMenuTime"
    "ncepHydro/hpc/.ncepHydro/hpcMenuTime"
    "ncepHydro/mpc/.ncepHydro/mpcMenuTime"
    "ncepHydro/nco/.ncepHydro/ncoMenuTime"
)

function get_fs() {
    df -P -- "$1" | awk 'NR==2 {print $1}'
}

did_work=false
echo INFO: Moving AbstractMenuUtil menu localization files to common_static
for menu_file in "${menu_files[@]}"; do
    for old_location in /awips2/edex/data/utility/cave_static/*/*/menus/"${menu_file}"; do
        if [[ ! -e "${old_location}" ]]; then
            continue
        fi
        new_location=${old_location/cave_static/common_static}
        echo INFO: Moving "${old_location}" to "${new_location}"
        did_work=true
        new_location_dir=$(dirname "${new_location}")
        if [[ ! -d "${new_location_dir}" ]]; then
            sudo -u awips mkdir -p -m 750 "${new_location_dir}"
        else
            echo "WARN: ${new_location_dir} already exists. Just copying newer files"
        fi
        if rsync -aux "${old_location}" "${new_location}"; then
            # cut context directory (e.g. /awips2/.../site/OAX) from old_location
            old_location_ctx_dir=$(echo "${old_location}" | cut -d/ -f-8)
            # filesystem that context dir is on
            ctx_fs=$(get_fs "${old_location_ctx_dir}")
            # remove moved file if on same filesystem as context dir
            file_fs=$(get_fs "${old_location}")
            if [[ "${file_fs}" = "${ctx_fs}" ]]; then
                rm -f "${old_location}" "${old_location}".md5
            fi
            old_location_dir=$(dirname "${old_location}")
            # remove empty directories up until the context dir
            while [[ "${old_location_ctx_dir}" != "${old_location_dir}" ]]; do
                # filesystem that dir is on
                dir_fs=$(get_fs "${old_location_dir}")
                if [[ "${dir_fs}" = "${ctx_fs}" ]]; then
                    # remove empty directory if on same filesystem;
                    # stop moving up directory tree if we find non-empty directory
                    rmdir "${old_location_dir}" 2>/dev/null || break
                fi
                old_location_dir=$(dirname "${old_location_dir}")
            done
        fi
        rm -f "${new_location}".md5
        echo INFO: Done moving "${old_location}" to "${new_location}"
    done
done

if [[ "${did_work}" == "false" ]]; then
    echo INFO: There are no files to move. Did nothing
else
    echo INFO: Done moving localization files
fi
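
After a successful run, none of the handled files should remain under cave_static. A hedged leftover check, reusing the menu_files array defined in the script above:

# Prints nothing once every handled menu file has been moved.
for menu_file in "${menu_files[@]}"; do
    ls /awips2/edex/data/utility/cave_static/*/*/menus/"${menu_file}" 2>/dev/null
done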


@ -1,32 +0,0 @@
#!/bin/bash
# 6373 - Move SCAN localization files to common_static.
#
# Author: tgurney
# Aug 11, 2017

did_work=false
echo INFO: Moving SCAN localization files to common_static
for old_location in /awips2/edex/data/utility/cave_static/*/*/scan; do
    if [[ ! -e "${old_location}" ]]; then
        continue
    fi
    new_location=${old_location/cave_static/common_static}
    echo INFO: Moving "${old_location}" to "${new_location}"
    did_work=true
    if [[ ! -d "${new_location}" ]]; then
        sudo -u awips mkdir -p -m 750 "${new_location}"
    else
        echo "WARN: ${new_location} already exists. Just copying newer files"
    fi
    rsync -aux "${old_location}" "${new_location}/.." &&
        rm -rf --one-file-system "${old_location}"
    find "${new_location}" -xdev -type f -name '*.md5' -delete
    echo INFO: Done moving "${old_location}" to "${new_location}"
done

if [[ "${did_work}" == "false" ]]; then
    echo INFO: There are no files to move. Did nothing
else
    echo INFO: Done moving localization files
fi
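
rsync's --dry-run flag gives a safe preview of the copy step before committing to it. A sketch for a single hypothetical site (OAX):

rsync -aux --dry-run /awips2/edex/data/utility/cave_static/site/OAX/scan /awips2/edex/data/utility/common_static/site/OAX/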


@ -1,36 +0,0 @@
#!/bin/bash
# 6347 - Move D2D tools localization files to common_static.
#
# Mostly copied from DR 6183's delta script
#
# Author: njensen
# August 02, 2017

did_work=false
echo INFO: Moving D2D tool localization files to common_static
for old_location in /awips2/edex/data/utility/cave_static/*/*/awipsTools; do
    if [[ ! -e "${old_location}" ]]; then
        continue
    fi
    new_location=${old_location/cave_static/common_static}
    echo INFO: Moving "${old_location}" to "${new_location}"
    did_work=true
    if [[ ! -d "${new_location}" ]]; then
        sudo -u awips mkdir -p -m 750 "${new_location}"
    else
        echo "WARN: ${new_location} already exists. Just copying newer files"
    fi
    rsync -aux "${old_location}" "${new_location}/.." &&
        rm -rf --one-file-system "${old_location}"
    find "${new_location}" -xdev -type f -name '*.md5' -delete
    find "${new_location}" -xdev -type f -name '*.pyc' -delete
    find "${new_location}" -xdev -type f -name '*.pyo' -delete
    echo INFO: Done moving "${old_location}" to "${new_location}"
done

if [[ "${did_work}" == "false" ]]; then
    echo INFO: There are no files to move. Did nothing
else
    echo INFO: Done moving localization files
fi


@ -1,21 +0,0 @@
#!/bin/bash
# 6402 - Add an index on source to the binlightning table in the metadata database.
#
# Author: mapeters
# Oct 31, 2017

psql=/awips2/psql/bin/psql
index=binlightning_sourceIndex
table=awips.binlightning
sql="CREATE INDEX IF NOT EXISTS ${index} ON ${table} USING btree(source);"

${psql} -U awipsadmin -d metadata -c "${sql}"
if [[ $? -ne 0 ]]; then
    echo "ERROR: Failed to create index ${index} on table ${table}."
    echo "FATAL: The update has failed."
    exit 1
fi

echo "INFO: the update has completed successfully!"
exit 0
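
To confirm the index afterwards (a suggested check, not in the script), query the pg_indexes catalog view:

/awips2/psql/bin/psql -U awipsadmin -d metadata -c "select indexname from pg_indexes where tablename='binlightning';"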


@ -1,56 +0,0 @@
#!/bin/bash
# #6507 - This script moves all postgres tablespaces to
# /awips2/database/tablespaces. It will also symlink /awips2/data to
# /awips2/database/data if that symlink does not exist already.
#
# Run this script as root on all servers that have /awips2/data.
#
# Author: tgurney

pg_ctl=/awips2/postgresql/bin/pg_ctl
old_data_dir=/awips2/data
new_data_dir=/awips2/database/data
new_tablespace_dir=/awips2/database/tablespaces
pg_tblspc=${new_data_dir}/pg_tblspc

su - awips -c "${pg_ctl} -D ${new_data_dir} status"
if [[ "$?" -eq 0 ]]; then
    echo "ERROR: Postgres is running. Cannot continue."
    exit 1
fi

if [[ -e "${old_data_dir}" && ! -h "${old_data_dir}" ]]; then
    echo "ERROR: ${old_data_dir} exists and is not a symlink. It needs to be"
    echo "moved to ${new_data_dir} first."
    exit 1
fi

echo INFO: Starting.

if [[ ! -h "${old_data_dir}" ]]; then
    echo "INFO: Symlinking ${new_data_dir} to ${old_data_dir}"
    ln -sf "${new_data_dir}" "${old_data_dir}"
    chown awips:fxalpha "${old_data_dir}"
fi

# move tablespaces
mkdir -p ${new_tablespace_dir}
chown -R awips:fxalpha ${new_tablespace_dir}
chmod 700 ${new_tablespace_dir}
for ts_link in "${pg_tblspc}"/* ; do
    if [[ -h ${ts_link} ]]; then
        this_ts=$(readlink "${ts_link}")
        if [[ "$(dirname "${this_ts}")" != "${new_tablespace_dir}" ]]; then
            mv -v "${this_ts}" "${new_tablespace_dir}" && \
                rm -vf "${ts_link}" && \
                ln -sv "${new_tablespace_dir}/$(basename ${this_ts})" ${ts_link}
            chown awips:fxalpha ${ts_link}
        else
            echo "INFO: ${this_ts} already in correct location"
        fi
    fi
done

echo INFO: Done.
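
Once Postgres is back up, it can report where each tablespace now lives; every Location should fall under /awips2/database/tablespaces. A suggested post-run check:

su - awips -c "/awips2/psql/bin/psql -U awipsadmin -d metadata -c '\db+'"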


@ -1,11 +0,0 @@
#!/bin/bash
# #7424 - This script adds an index to the grid_info table's location_id column
# to allow for faster lookup and deletion of large numbers of grid_coverage.
#
# NOTE: This script will error if the index already exists, but this will not
# negatively impact the system.

# run the update
echo "DR #7424 - Adding index to grid_info.location_id..."
/awips2/psql/bin/psql -U awipsadmin -d metadata -c "CREATE INDEX grid_info_location_id_index ON grid_info USING btree (location_id);"
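
The DR 6402 script earlier in this set guards the same operation with CREATE INDEX IF NOT EXISTS; a hypothetical idempotent variant of this update would read:

/awips2/psql/bin/psql -U awipsadmin -d metadata -c "CREATE INDEX IF NOT EXISTS grid_info_location_id_index ON grid_info USING btree (location_id);"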


@ -1,156 +0,0 @@
#!/usr/bin/python
#
"""
Convert to append the coverageid to the satellite datauris, and
modify the associated satellite hdf5 group names to append the
coverageid. The new groups are added as an alias to the existing
datasets.

    Date         Ticket#    Engineer    Description
    ------------ ---------- ----------- --------------------------
    20120711     798        jkorman     Initial Development
    20120822     798        jkorman     Added ability to account for files containing hdfFileId.
"""
from subprocess import Popen, PIPE
import sys
from time import time
import h5py

POSTGRES_CMD = "psql -U awips -d metadata -t -q -A -c "
HDF5_LOC = "/awips2/edex/data/hdf5"

# column indices of the rows returned by get_satellite_rows()
ID_IDX = 0
DATAURI_IDX = 1
COVERAGE_IDX = 2
SECTORID_IDX = 3
PHYSICAL_ELEMENT_IDX = 4
REFTIME_IDX = 5
HDFFILEID_IDX = 6

def update_satellite_table():
    """
    Add the interpolationLevels column to the satellite table.
    """
    result = queryPostgres("select count(*) from information_schema.columns where table_name='satellite' and column_name='interpolationlevels';")
    if(result[0][0] == '0'):
        result = queryPostgres("alter table satellite add column interpolationlevels integer;")
        print "Adding interpolationlevels column to satellite table"

def formatFileTime(refTime):
    """
    Extract and format the year (YYYY), month (MM), day (DD), and hour (HH)
    from the reference time. The output is formatted as YYYY-MM-DD-HH
    """
    return refTime[0:4] + "-" + refTime[5:7] + "-" + refTime[8:10] + "-" + refTime[11:13]

def getFilename(refTime, h5FileId):
    """
    Create the satellite data hdf filename corresponding to the given reference
    time and a possible hdf5 file identifier.
    """
    if(len(h5FileId) > 0):
        filename = "satellite-" + formatFileTime(refTime) + "-" + h5FileId + ".h5"
    else:
        filename = "satellite-" + formatFileTime(refTime) + ".h5"
    return filename

def queryPostgres(sql):
    """
    Run the given SQL through psql and return the result rows as lists of
    '|'-delimited fields.
    """
    result = Popen(POSTGRES_CMD + "\"" + sql + "\"", stdout=PIPE, shell=True)
    retVal = []
    for line in result.stdout:
        retVal.append(line.strip().split("|"))
    return retVal

def get_sectorids():
    """
    Get a list of unique sector identifiers from the satellite table.
    """
    return queryPostgres("select distinct sectorid from satellite;")

def get_satellite_rows(sectorid):
    """
    Query the satellite rows for the given sector id and group them by the
    hdf5 file they are stored in.
    """
    keys = {}
    rows = queryPostgres("select id, dataURI, coverage_gid, sectorid, physicalelement, reftime, hdffileid from satellite where sectorid=" + repr(sectorid) + ";")
    for row in rows:
        # create the key for this entry.
        key = "/satellite/" + row[SECTORID_IDX] + "/" + row[PHYSICAL_ELEMENT_IDX] + "/" + getFilename(row[REFTIME_IDX], row[HDFFILEID_IDX])
        #print "key = " + key
        # have we found this key already?
        if(key in keys):
            # if so, get the row list for this key
            rowList = keys[key]
        else:
            # otherwise create an empty list to put the row in
            rowList = []
            # add it to the collection
            keys[key] = rowList
        # and add the row to the list
        rowList.append(row)
    return keys

def process_all_satellite():
    """
    Process all entries in the satellite table.
    Do one sector id at a time.
    """
    sectorids = get_sectorids()
    if(sectorids):
        for sectorid in sectorids:
            print "Processing sector " + sectorid[0]
            keys = get_satellite_rows(sectorid[0])
            if(keys):
                for key in keys:
                    print "=========================================================="
                    print " Processing key = " + key
                    fname = HDF5_LOC + key
                    try:
                        f = h5py.File(fname, 'r+')
                        for row in keys[key]:
                            dataURI = row[DATAURI_IDX]
                            coverage = row[COVERAGE_IDX]
                            newGroupName = dataURI + "/" + coverage
                            group = f.create_group(newGroupName)
                            group = f.create_group(newGroupName + "/Data-interpolated")
                            oldds = row[DATAURI_IDX] + "/Data"
                            newds = newGroupName + "/Data"
                            # Link to the old data set
                            f[newds] = h5py.SoftLink(oldds)
                            group = f[row[DATAURI_IDX] + "/Data-interpolated"]
                            numLevels = 0
                            for n in group.keys():
                                newds = newGroupName + "/Data-interpolated/" + n
                                if(n == '0'):
                                    # special case for this link:
                                    # dataset /Data-interpolated/0 points to /Data
                                    oldds = row[DATAURI_IDX] + "/Data"
                                else:
                                    numLevels += 1
                                    oldds = row[DATAURI_IDX] + "/Data-interpolated/" + n
                                f[newds] = h5py.SoftLink(oldds)
                            updateSql = "update satellite set datauri='" + row[DATAURI_IDX] + "/" + row[COVERAGE_IDX] + "'"
                            updateSql += ", interpolationlevels=" + repr(numLevels)
                            updateSql += " where id=" + row[ID_IDX] + ";"
                            queryPostgres(updateSql)
                        f.close()
                    except Exception, e:
                        print e
                        print "Error occurred processing file " + fname
            else:
                print "No keys found for the sector id " + sectorid[0]
    else:
        print "No sector identifiers found in the satellite table"

if __name__ == '__main__':
    t = time()
    update_satellite_table()
    process_all_satellite()
    print "Total Conversion time %ds" % (time() - t)

Some files were not shown because too many files have changed in this diff.