Merge "Issue #2021 - add a .project file so files.native can be imported as an Eclipse project. Remove duplicate hydro filesystem in rary.ohd.filesystem - it used to be merged into the tar file." into development
Former-commit-id: ffa5962f52 [formerly eeff0a13e1 [formerly 92f888181ea2d4e4dbfb44623c58a2a547b7e1d8]]
Former-commit-id: eeff0a13e1
Former-commit-id: 50db54dfeb
commit 7f56d86586
455 changed files with 11 additions and 43311 deletions
nativeLib/files.native/.project (new file, 11 additions)
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>files.native</name>
  <comment></comment>
  <projects>
  </projects>
  <buildSpec>
  </buildSpec>
  <natures>
  </natures>
</projectDescription>
@@ -1,209 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?fileVersion 4.0.0?>

<cproject>
  <storageModule moduleId="org.eclipse.cdt.core.settings">
    <cconfiguration id="0.1790993038">
      <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.1790993038" moduleId="org.eclipse.cdt.core.settings" name="i386-pc-linux-gnu.debug">
        <externalSettings/>
        <extensions>
          <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
          <extension id="org.eclipse.cdt.core.MakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
          <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
          <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
          <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
        </extensions>
      </storageModule>
      <storageModule moduleId="cdtBuildSystem" version="4.0.0">
        <configuration artifactName="rary.ohd.filesystem" buildProperties="" description="" id="0.1790993038" name="i386-pc-linux-gnu.debug" parent="org.eclipse.cdt.build.core.prefbase.cfg">
          <folderInfo id="0.1790993038." name="/" resourcePath="">
            <toolChain id="org.eclipse.cdt.build.core.prefbase.toolchain.1790834611" name="No ToolChain" resourceTypeBasedDiscovery="false" superClass="org.eclipse.cdt.build.core.prefbase.toolchain">
              <targetPlatform id="org.eclipse.cdt.build.core.prefbase.toolchain.1790834611.1721242410" name=""/>
              <builder id="org.eclipse.cdt.build.core.settings.default.builder.419859915" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
              <tool id="org.eclipse.cdt.build.core.settings.holder.libs.1927386011" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs"/>
              <tool id="org.eclipse.cdt.build.core.settings.holder.60971203" name="Assembly" superClass="org.eclipse.cdt.build.core.settings.holder">
                <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.492879294" languageId="org.eclipse.cdt.core.assembly" languageName="Assembly" sourceContentType="org.eclipse.cdt.core.asmSource" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
              </tool>
              <tool id="org.eclipse.cdt.build.core.settings.holder.562885986" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder">
                <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.480334028" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
              </tool>
              <tool id="org.eclipse.cdt.build.core.settings.holder.1910254409" name="GNU C" superClass="org.eclipse.cdt.build.core.settings.holder">
                <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.289311885" languageId="org.eclipse.cdt.core.gcc" languageName="GNU C" sourceContentType="org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
              </tool>
            </toolChain>
          </folderInfo>
        </configuration>
      </storageModule>
      <storageModule moduleId="scannerConfiguration">
        <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId="org.eclipse.cdt.make.core.GCCStandardMakePerProjectProfile"/>
        <profile id="org.eclipse.cdt.make.core.GCCStandardMakePerProjectProfile">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/${specs_file}" command="gcc" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.make.core.GCCStandardMakePerFileProfile">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="makefileGenerator">
            <runAction arguments="-f ${project_name}_scd.mk" command="make" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.managedbuilder.core.GCCManagedMakePerProjectProfile">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/${specs_file}" command="gcc" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.managedbuilder.core.GCCManagedMakePerProjectProfileCPP">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.cpp" command="g++" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.managedbuilder.core.GCCManagedMakePerProjectProfileC">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.c" command="gcc" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.managedbuilder.core.GCCWinManagedMakePerProjectProfile">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/${specs_file}" command="gcc" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.managedbuilder.core.GCCWinManagedMakePerProjectProfileCPP">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.cpp" command="g++" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <profile id="org.eclipse.cdt.managedbuilder.core.GCCWinManagedMakePerProjectProfileC">
          <buildOutputProvider>
            <openAction enabled="true" filePath=""/>
            <parser enabled="true"/>
          </buildOutputProvider>
          <scannerInfoProvider id="specsFile">
            <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.c" command="gcc" useDefault="true"/>
            <parser enabled="true"/>
          </scannerInfoProvider>
        </profile>
        <scannerConfigBuildInfo instanceId="0.1790993038">
          <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId="org.eclipse.cdt.make.core.GCCStandardMakePerProjectProfile"/>
          <profile id="org.eclipse.cdt.make.core.GCCStandardMakePerProjectProfile">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/${specs_file}" command="gcc" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.make.core.GCCStandardMakePerFileProfile">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="makefileGenerator">
              <runAction arguments="-f ${project_name}_scd.mk" command="make" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.managedbuilder.core.GCCManagedMakePerProjectProfile">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/${specs_file}" command="gcc" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.managedbuilder.core.GCCManagedMakePerProjectProfileCPP">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.cpp" command="g++" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.managedbuilder.core.GCCManagedMakePerProjectProfileC">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.c" command="gcc" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.managedbuilder.core.GCCWinManagedMakePerProjectProfile">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/${specs_file}" command="gcc" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.managedbuilder.core.GCCWinManagedMakePerProjectProfileCPP">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.cpp" command="g++" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
          <profile id="org.eclipse.cdt.managedbuilder.core.GCCWinManagedMakePerProjectProfileC">
            <buildOutputProvider>
              <openAction enabled="true" filePath=""/>
              <parser enabled="true"/>
            </buildOutputProvider>
            <scannerInfoProvider id="specsFile">
              <runAction arguments="-E -P -v -dD ${plugin_state_location}/specs.c" command="gcc" useDefault="true"/>
              <parser enabled="true"/>
            </scannerInfoProvider>
          </profile>
        </scannerConfigBuildInfo>
      </storageModule>
      <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
    </cconfiguration>
  </storageModule>
  <storageModule moduleId="cdtBuildSystem" version="4.0.0">
    <project id="rary.ohd.filesystem.null.837652002" name="rary.ohd.filesystem"/>
  </storageModule>
</cproject>
@@ -1,84 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>rary.ohd.filesystem</name>
  <comment></comment>
  <projects>
  </projects>
  <buildSpec>
    <buildCommand>
      <name>org.python.pydev.PyDevBuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
      <triggers>clean,full,incremental,</triggers>
      <arguments>
        <dictionary>
          <key>?name?</key>
          <value></value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.append_environment</key>
          <value>true</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.autoBuildTarget</key>
          <value>all</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.buildArguments</key>
          <value></value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.buildCommand</key>
          <value>make</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.cleanBuildTarget</key>
          <value>clean</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.contents</key>
          <value>org.eclipse.cdt.make.core.activeConfigSettings</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.enableAutoBuild</key>
          <value>false</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.enableCleanBuild</key>
          <value>true</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.enableFullBuild</key>
          <value>true</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.fullBuildTarget</key>
          <value>all</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.stopOnError</key>
          <value>true</value>
        </dictionary>
        <dictionary>
          <key>org.eclipse.cdt.make.core.useDefaultBuildCmd</key>
          <value>true</value>
        </dictionary>
      </arguments>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
  </buildSpec>
  <natures>
    <nature>org.eclipse.cdt.core.ccnature</nature>
    <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
    <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
    <nature>org.eclipse.cdt.core.cnature</nature>
    <nature>org.python.pydev.pythonNature</nature>
  </natures>
</projectDescription>
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?>

<pydev_project>
  <pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
  <pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
</pydev_project>
@@ -1,3 +0,0 @@
#Sun Mar 07 16:21:30 CST 2010
eclipse.preferences.version=1
environment/project/0.1790993038=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?>\n<environment append\="true" appendContributed\="true">\n<variable delimiter\=";" name\="INSTALL_FROM" operation\="append" value\="awips"/>\n<variable delimiter\=";" name\="INSTALL_TO" operation\="replace" value\="../build.native/i386-pc-linux-gnu/awips"/>\n</environment>\n
@@ -1,80 +0,0 @@
#!/bin/sh
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
#                     Mail Stop B8
#                     Omaha, NE 68106
#                     402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

# Script: check_app_Context
# Script that determines and sets the APP_CONTEXT variable for the
# calling script, and checks if the setting for that value is set to 'ON' or 'OFF'
# If off, this script will exit the calling script.

# get contextName from script name
oldIFS="$IFS"
IFS="/"
n=0
for element in ${0}
do
    n=`expr $n + 1`
    eval pathPart$n="$element"
done
eval contextName='$pathPart'$n

# parse off file extension if necessary
IFS="."
n=0
for element in $contextName
do
    n=`expr $n + 1`
    eval contextName="$element"
    break
done

# if APP_CONTEXT is not set
if [ "${APP_CONTEXT:-NOTSET}" == "NOTSET" ]
then
    eval APP_CONTEXT=${contextName}
else
    # check if APP_CONTEXT ends with script's context
    # if not, then append it
    IFS="."
    n=0
    for element in $APP_CONTEXT
    do
        n=`expr $n + 1`
        eval currentContextName="$element"
    done
    if [ "${currentContextName}" != "${contextName}" ]
    then
        APP_CONTEXT=${APP_CONTEXT}'.'${contextName}
    fi
fi
IFS="${oldIFS}"
export APP_CONTEXT

# Determine if on or off from APP_CONTEXT. Default APP_CONTEXT is ON
eval APP_VAR=$(runso rary.ohd.util gad $APP_CONTEXT)
echo "App Execution Token for script ${contextName} with App Context ${APP_CONTEXT} is ${APP_VAR:-ON}"

# if set to not run, exit the calling script
if [ "${APP_VAR:-ON}" == "OFF" ]
then
    echo "Script ${contextName} will exit and not run."
    exit
fi

@@ -1,4 +0,0 @@
525
440
81
91
@@ -1,6 +0,0 @@
xxx xxx -1 5
40.5 97.0
40.5 79.0
28.5 79.0
28.5 97.0
40.5 97.0
@@ -1,6 +0,0 @@
xxx xxx -1 5
43.3 -98.4
42.9 -94.2
39.9 -98.9
39.4 -95.0
43.3 -98.4
@@ -1,6 +0,0 @@
xxx xxxxxx -1 5
40.5 97.0
39.8 93.6
38.4 88.1
37.1 85.0
36.7 82.4
@@ -1,40 +0,0 @@
Alexander City 32.57 85.57
Anniston AP 33.35 85.51
Auburn 32.36 85.30
Birmingham AP 33.34 86.45
Decatur 34.37 86.59
Dothan AP 31.19 85.27
Florence AP 34.48 87.40
Gadsden 34.1 86.0
Huntsville AP 34.42 86.35
Mobile AP 30.41 88.15
Mobile Co 30.40 88.15
Montgomery AP 32.23 86.22
Selma-Craig AFB 32.20 87.59
Talladega 33.27 86.6
Tuscaloosa AP 33.13 87.37
Anchorage AP 61.10 150.1
Barrow (S) 71.18 156.47
Fairbanks AP (S) 64.49 147.52
Juneau AP 58.22 134.35
Kodiak 57.45 152.29
Nome AP 64.30 165.26
Douglas AP 31.27 109.36
Flagstaff AP 35.8 111.40
Fort Huachuca AP (S) 31.35 110.20
Kingman AP 35.12 114.1
Nogales 31.21 110.55
Phoenix AP (S) 33.26 112.1
Prescott AP 34.39 112.26
Tucson AP (S) 32.7 110.56
Winslow AP 35.1 110.44
Yuma AP 32.39 114.37
Blytheville AFB 35.57 89.57
Camden 33.36 92.49
El Dorado AP 33.13 92.49
Fayetteville AP 36.0 94.10
Fort Smith AP 35.20 94.22
Hot Springs 34.29 93.6
Jonesboro 35.50 90.42
Little Rock AP (S) 34.44 92.14
Pine Bluff AP 34.18 92.5
@@ -1,4 +0,0 @@
561
130
491
437
@@ -1,5 +0,0 @@
#!/bin/ksh
RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env

runso rary.ohd.util create_bas_bound_main $1 $2
@@ -1,399 +0,0 @@
#!/bin/ksh

#
# script for gathering DHR products from fxa directories
# and moving them to the OHD precip_proc world
# October 16, 2007
# David T. Miller RSIS OHD/HSEB
# this script is modified based on DPAgather script.

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env

export DSP_PROD_DIR=$(get_apps_defaults dsp_prod_dir)
export DHR_LOG_DIR=$(get_apps_defaults dhr_log_dir)
export DHR_DIRNAME2=$(get_apps_defaults dhr_dirname2)
export DHR_DIRNAME1=$(get_apps_defaults dhr_dirname1)
export DHR_PROD_DIR=$(get_apps_defaults dhr_prod_dir)
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export DB_NAME=$(get_apps_defaults db_name)


#
# define function for checking for a stop file to bypass the gather process

checkcronstop()
{
    STOP_DIR=$1
    test -r $STOP_DIR/stop_hpe_crons
    FILES_FOUND=$?
    if test $FILES_FOUND -eq 0
    then

        return 1

    else
        return 0
    fi
}

checkHPErun()
{
    # Check to see if age on file is more than 10 minutes
    # If it is stop the start_hpe script until data arrives.
    tnow=`date +%s`
    tfile=`date +%s -r $HPE_LOG_DIR/processhpe_log`
    agesecs=$(($tnow-$tfile))
    echo "Checking age of processhpe_log $agesecs secs " >> $hpefnm
    if [[ $agesecs -ge 600 ]]
    then
        return 1
    else
        return 2
    fi
}

# setup log files
#

hpefnm=$HPE_LOG_DIR/start_hpe_log
hpetmp=$HPE_LOG_DIR/start_hpe_tmp
fnm=$DHR_LOG_DIR/dhrgather_log
tmp=$DHR_LOG_DIR/dhrgather_tmp

#
# save latest entries in log file
#

if [[ -s $fnm ]]
then
    tail -1200 $fnm > $tmp
    mv $tmp $fnm
fi

if [[ -s $hpefnm ]]
then
    tail -1200 $hpefnm > $hpetmp
    mv $hpetmp $hpefnm
fi

checkcronstop $HPE_LOG_DIR
STOP_FOUND=$?

if test STOP_FOUND -eq 1
then

    # Instead of using kill, will introduce a delayed stop.
    # This is especially important if radar files are separate in time
    # and only 1 radar is indicating precip but the rest are not
    # The time of this file is checked in start_hpe script.
    # If greater than 20 min, the start_hpe script will exit

    if [[ ! -r $HPE_LOG_DIR/stop_hpe_process ]]
    then
        touch $HPE_LOG_DIR/stop_hpe_process
        echo "stop_hpe_process file created " $Dte >> $hpefnm
        echo "HPE decoders and field generator will stop after 20 min " >> $hpefnm
        echo "unless precipitation occurs at a radar before that time " >> $hpefnm
    fi


    exit 0
fi


#----------------------------------------------------------------------
# Don't fire up another instance if one is underway
# the AMIRUNNING_DIR variable is required by the amirunning script

AMIRUNNING_DIR=$DHR_LOG_DIR

. $RFCLX/public/bin/amirunning


if [[ $AmIRunning = "yes" ]]; then
    echo DHRgather script cancelled due to other instance running >> $fnm
    exit 0
fi
dhrps=`ps -fe | grep DHRgather | grep -v grep | grep -v vi | wc -l`
if [[ $dhrps -gt 1 ]]
then
    echo ""
fi

#
# begin write to log
#

echo "-------------------------------------------" >> $fnm
Dte=`date -u`
echo "Begin gather at "$Dte >> $fnm

#if [[ ! -d $DHR_PROD_DIR ]]
#then
# echo "The DHR PROD DIR $DHR_PROD_DIR is invalid or " >> $fnm
# echo "does not exist. Exiting DHRgather script. " $Dte >> $fnm
# exit 1
#fi

#
# touch dhrgather control temp file
#

touch $DHR_LOG_DIR/dhrgather_control.temp
touch /tmp/dhrgather_control.temp

#
# check if radarloc table has any radars with use_radar="T"
# if not, then stop
# the sed command strips off all non-numeric characters
#

Num_Rad=$(
echo "select count(*) from Radarloc WHERE use_radar='T'; " | \
$POSTGRESQLBINDIR/psql -q -t $DB_NAME
)

if [[ $Num_Rad = 0 ]]
then

    echo no radars defined for use -- gather not done >> $fnm
    Dte=`date -u`
    echo "End gather at "$Dte >> $fnm
    exit 1

elif [[ -z $Num_Rad ]]
then

    echo number of radars defined for use is undetermined -- gather continuing >> $fnm
fi

#
# write radar identifiers and prefixes to temp file
#

echo "select lower(radid_prefix), lower(radid) from Radarloc WHERE use_radar='T';" | \
$POSTGRESQLBINDIR/psql -q -A -t $DB_NAME > $DHR_LOG_DIR/radid.temp


#
# if radid.temp is not created or is 0 bytes in length, then print message and exit gather script
#

if [[ ! -s $DHR_LOG_DIR/radid.temp ]]
then
    rm -f $DHR_LOG_DIR/radid.temp
    rm -f $DHR_LOG_DIR/dhrgather_control.temp
    echo radid.temp file not created properly -- gather not done >> $fnm

    Dte=`date -u`
    echo "End gather at "$Dte >> $fnm
    exit 1
fi

#
# for each radar id in file, construct directory name and search for products
# note that the radar id in the directory name is lower case and 4 char
#

FileCheck=0
RainCheck=0

#for rid in `cat $DHR_LOG_DIR/radid.temp`
#do
#radid_dir=$(echo "$rid" | sed 's/|//g')
#radid=${radid_dir#?}
#PRODUCT_DIR_NAME=$DHR_DIRNAME1/$radid_dir/$DHR_DIRNAME2

#
# Look for raw products in the directory satisfying the time test.
# If found, then copy to a temporary location and mv to the gather directory.
# Change radar id to upper case.
# Successfully processed products will be deleted from the gather directory
# as part of the decoding process.
# Write old name, new name to log file.
#
# In addition, this now checks the DHR data for precip status category.
# If category is 1 or 2, it will kick off the start_hpe script
# which will run the DHR and DSP decoders and
# the HPE field generator
#

#num_files=`ls -F $PRODUCT_DIR_NAME 2>>$fnm | grep -v "*" | grep -v "/" | wc -l`
#if test $num_files -ne 0
#then
# RADID=$(print $radid | tr '[a-z]' '[A-Z]')
# i=0
# for pname in `find $PRODUCT_DIR_NAME -type f -name '*' -newer $DHR_LOG_DIR/dhrgather_control`

for pname in `find $DHR_PROD_DIR -type f -name '*' -newer $DHR_LOG_DIR/dhrgather_control`
do

    #GATHER_FILENAME=$DHR_PROD_DIR/DHR$RADID.`date -u +%Y%m%d%H%M%S.$i`
    #i=`expr $i + 1`
    #cp $pname $DHR_LOG_DIR/radar_temp
    #mv $DHR_LOG_DIR/radar_temp $GATHER_FILENAME
    #echo $pname "copied to" $GATHER_FILENAME >> $fnm

    echo $pname "copied to" $DHR_PROD_DIR >> $fnm
    GATHER_FILENAME=$pname

    # check to see if the SUPL info in the DHR data indicates rain

    result=`runso rary.ohd.pproc get_dhrpsm_main $pname`
    Get_DHRPsm=`expr $result`

    # debug statement to log file
    echo "$GATHER_FILENAME Get_DHRPsm = $Get_DHRPsm" >> $hpefnm

    FileCheck=1

    if [[ $Get_DHRPsm > 0 ]]
    then
        RainCheck=1
    fi

done
#fi

#done
#
# mv dhrgather control file
#

mv $DHR_LOG_DIR/dhrgather_control.temp $DHR_LOG_DIR/dhrgather_control

#
# delete radid.temp file
#

rm $DHR_LOG_DIR/radid.temp

#
# end write to log
#

Dte=`date -u`
echo "End gather at "$Dte >> $fnm

# In order to execute gather every minute need to not stop or start HPE rapidly
# If there were files last minute but not any now,
# need to just continue with whatever RainCheck did last time

echo "FileCheck = $FileCheck" >> $hpefnm

if [[ $FileCheck -gt 0 ]]
then

    # based on precip category in new DHR products for the radars,
    # start or stop HPE processes

    echo "checking to start or stop start_hpe script" $Dte >> $hpefnm
    phpe=`ps -ef|grep start_hpe|grep -v grep|grep -v vi|awk '{print $2}'`

    echo "checking to start or stop start_hpe script" $Dte >> $hpefnm

    if [[ -n "$phpe" ]]
    then
        checkHPErun
        RUN_FOUND=$?
        if test RUN_FOUND -eq 1
        then
            echo "We are going to stop start_hpe " $Dte >> $hpefnm
            `kill $phpe`
            phpe=`ps -ef|grep start_hpe|grep -v grep|grep -v vi|awk '{print $2}'`
            if [[ -n "$phpe" ]]
            then
                `kill -9 $phpe`
            fi
            phpe=`ps -ef|grep start_hpe|grep -v grep|grep -v vi|awk '{print $2}'`
        fi
    fi

    if [[ $RainCheck -le 0 ]]
    then
        if [[ -z "$phpe" ]]
        then
            echo "start_hpe script not running " $Dte >> $hpefnm

            # if HPE process isn't running, there's no precip in area.
            # remove the temp files in the DHR and DSP product directories
            #
            if [[ -d $DHR_PROD_DIR && -n "$DHR_PROD_DIR" ]]
            then
                echo "Removing temp DHR files in $DHR_PROD_DIR due to no rain " $Dte >> $hpefnm
                rm -f $DHR_PROD_DIR/*
            else
                echo "Attempted to remove files but " >> $hpefnm
                echo "DHR PROD DIR $DHR_PROD_DIR invalid " $Dte >> $hpefnm
            fi
            if [[ -d $DSP_PROD_DIR && -n "$DSP_PROD_DIR" ]]
            then
                echo "Removing temp DSP files in $DSP_PROD_DIR due to no rain " $Dte >> $hpefnm
                rm -f $DSP_PROD_DIR/*
            else
                echo "Attempted to remove files but " >> $hpefnm
                echo "DSP PROD DIR $DSP_PROD_DIR invalid " $Dte >> $hpefnm
            fi
        else

            # Instead of using kill, will introduce a delayed stop.
            # This is especially important if radar files are separate in time
            # and only 1 radar is indicating precip but the rest are not
            # The time of this file is checked in start_hpe script.
            # If greater than 20 min, the start_hpe script will exit

            if [[ ! -r $HPE_LOG_DIR/stop_hpe_process ]]
            then
                touch $HPE_LOG_DIR/stop_hpe_process
                echo "stop_hpe_process file created " $Dte >> $hpefnm
                echo "HPE decoders and field generator will stop after 20 min " >> $hpefnm
                echo "unless precipitation occurs at a radar before that time " >> $hpefnm
            else
                echo "stop_hpe_process file already created " $Dte >> $hpefnm
                echo "HPE process will stop in less than 20 min unless new precip occurs " >> \
                $hpefnm
            fi


        fi



    elif [[ $RainCheck -gt 0 ]]
    then
        #sleep 10
        echo "RainCheck= "$RainCheck >> $hpefnm
        phpe=`ps -ef|grep start_hpe|grep -v grep|grep -v vi|awk '{print $2}' | wc -l`
        echo "PHPE " $phpe >>$hpefnm

        if [[ $phpe -le 0 ]]
        then

            echo "starting HPE processes via start_hpe script " $Dte >> $hpefnm
            /bin/ksh $PPROC_BIN/launch_hpe 2>$DHR_LOG_DIR/launch_hpe.err &
            exit 0
        else

            # Remove the stop file if it exists because at least one radar has
            # indicated there's precip and don't want the processes to stop
            # prematurely.
            #
            # Script will check if one has been entered after it starts.

            if [[ -r $HPE_LOG_DIR/stop_hpe_process ]]
            then
                rm -f $HPE_LOG_DIR/stop_hpe_process
            fi
            echo "start_hpe script already running " $Dte >> $hpefnm
        fi
    fi
else
    echo "no radar files found to process " $Dte >> $hpefnm
    # next statement goes with FileCheck if test
fi
exit 0
@@ -1,186 +0,0 @@
#!/bin/ksh
#
#
# script for gathering DPA products from fxa directories
# and moving them to the OHD precip_proc world
# August 22, 2003
# updated for PostgreSQL - Oct 22, 2004

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env
export DPA_DIRNAME2=$(get_apps_defaults dpa_dirname2)
export DPA_DIRNAME1=$(get_apps_defaults dpa_dirname1)
export DPA_PROD_DIR=$(get_apps_defaults dpa_prod_dir)
export DPA_LOG_DIR=$(get_apps_defaults dpa_log_dir)
export DB_NAME=$(get_apps_defaults db_name)

# this script uses the following set_hydro_env variables:
# DPA_LOG_DIR, DPA_PROD_DIR, DPA_DIRNAME1, DPA_DIRNAME2, DB_NAME
#

# setup log files
#

fnm=$DPA_LOG_DIR/dpagather_log
tmp=$DPA_LOG_DIR/dpagather_tmp

#----------------------------------------------------------------------
# Don't fire up another instance if one is underway
# the AMIRUNNING_DIR variable is required by the amirunning script

AMIRUNNING_DIR=$DPA_LOG_DIR

. /awips/hydroapps/public/bin/amirunning

if [[ $AmIRunning = "yes" ]]; then
    echo DPAgather script cancelled due to other instance running >> $fnm
    exit 1
fi

#
# save latest entries in log file
#

if [[ -s $fnm ]]
then
    tail -1200 $fnm > $tmp
    mv $tmp $fnm
fi

#
# begin write to log
#

echo "-------------------------------------------" >> $fnm
Dte=`date -u`
echo "Begin gather at "$Dte >> $fnm

#
# touch dpagather control temp file
#

touch $DPA_LOG_DIR/dpagather_control.temp

#
# check if radarloc table has any radars with use_radar="T"
# if not, then stop
# the sed command strips off all non-numeric characters
#

Num_Rad=$(
echo "select count(*) from Radarloc WHERE use_radar='T'; " | \
$POSTGRESQLBINDIR/psql -q -t $DB_NAME
)

if [[ $Num_Rad = 0 ]]
then

    echo no radars defined for use -- gather not done >> $fnm
    Dte=`date -u`
    echo "End gather at "$Dte >> $fnm
    exit 1

elif [[ -z $Num_Rad ]]
then

    echo number of radars defined for use is undetermined -- gather continuing >> $fnm
fi

#
# write radar identifiers and prefixes to temp file
#


#$POSTGRESQLBINDIR/psql -q -t $DB_NAME
#unload to "$DPA_LOG_DIR/radid.temp"
#select lower(radid_prefix), lower(radid)
#from Radarloc
#WHERE use_radar='T';

echo "select lower(radid_prefix), lower(radid) from Radarloc WHERE use_radar='T';" | \
$POSTGRESQLBINDIR/psql -q -A -t $DB_NAME > $DPA_LOG_DIR/radid.temp


#
# if radid.temp is not created or is 0 bytes in length, then print message and exit gather script
#

if [[ ! -s $DPA_LOG_DIR/radid.temp ]]
then
    rm -f $DPA_LOG_DIR/radid.temp
    rm -f $DPA_LOG_DIR/dpagather_control.temp
    echo radid.temp file not created properly -- gather not done >> $fnm

    Dte=`date -u`
    echo "End gather at "$Dte >> $fnm
    exit 1
fi

#
# for each radar id in file, construct directory name and search for products
# note that the radar id in the directory name is lower case and 4 char
#

for rid in `cat $DPA_LOG_DIR/radid.temp`
do

    radid_dir=$(echo "$rid" | sed 's/|//g')
    radid=${radid_dir#?}

    PRODUCT_DIR_NAME=$DPA_DIRNAME1/$radid_dir/$DPA_DIRNAME2

    #
    # look for raw products in the directory satisfying the time test
    # if found, then copy to a temporary location and mv to the gather directory
    # change radar id to upper case
    # successfully processed products will be deleted from the gather directory as part of
    # the decoding process
    # write old name, new name to log file
    #

    num_files=`ls -F $PRODUCT_DIR_NAME 2>>$fnm | grep -v "*" | grep -v "/" | wc -l`
    if test $num_files -ne 0
    then
        RADID=$(print $radid | tr '[a-z]' '[A-Z]')

        i=0

        for pname in `find $PRODUCT_DIR_NAME -type f -name '*' -newer $DPA_LOG_DIR/dpagather_control`
        do

            GATHER_FILENAME=$DPA_PROD_DIR/DPA$RADID.`date -u +%Y%m%d%H%M%S.$i`
            i=`expr $i + 1`

            cp $pname $DPA_LOG_DIR/radar_temp

            mv $DPA_LOG_DIR/radar_temp $GATHER_FILENAME

            echo $pname "copied to" $GATHER_FILENAME >> $fnm

        done

    fi

done

#
# mv dpagather control file
#

mv $DPA_LOG_DIR/dpagather_control.temp $DPA_LOG_DIR/dpagather_control

#
# delete radid.temp file
#

rm $DPA_LOG_DIR/radid.temp

#
# end write to log
#

Dte=`date -u`
echo "End gather at "$Dte >> $fnm
@@ -1,223 +0,0 @@
#!/bin/ksh

#
# script for gathering DSP products from fxa directories
# and moving them to the OHD precip_proc world
# October 16, 2007
# David T. Miller RSIS OHD/HSEB
# this script is modified based on DPAgather script.

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env

export DSP_LOG_DIR=$(get_apps_defaults dsp_log_dir)
export DSP_DIRNAME2=$(get_apps_defaults dsp_dirname2)
export DSP_DIRNAME1=$(get_apps_defaults dsp_dirname1)
export DSP_PROD_DIR=$(get_apps_defaults dsp_prod_dir)
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)
export DB_NAME=$(get_apps_defaults db_name)

#
# define function for checking for a stop file to bypass the gather process

checkcronstop()
{
    STOP_DIR=$1
    test -r $STOP_DIR/stop_hpe_crons
    FILES_FOUND=$?
    if test $FILES_FOUND -eq 0
    then

        return 1

    else
        return 0
    fi
}


checkcronstop $HPE_LOG_DIR
STOP_FOUND=$?

if test STOP_FOUND -eq 1
then
    exit 0
fi

# Note that this is slightly different than the DHR log as that
# is the primary data gather process. DSP is secondary and will
# not have stop_hpe_crons info in the log.
#

# setup log files
#

fnm=$DSP_LOG_DIR/dspgather_log
tmp=$DSP_LOG_DIR/dspgather_tmp
#----------------------------------------------------------------------
# Don't fire up another instance if one is underway
# the AMIRUNNING_DIR variable is required by the amirunning script

AMIRUNNING_DIR=$DSP_LOG_DIR

. $RFCLX/public/bin/amirunning

if [[ $AmIRunning = "yes" ]]; then
    echo DSPgather script cancelled due to other instance running >> $fnm
    exit 1
fi

#
# save latest entries in log file
#

if [[ -s $fnm ]]
then
    tail -1200 $fnm > $tmp
    mv $tmp $fnm
fi

#
# begin write to log
#

echo "-------------------------------------------" >> $fnm
Dte=`date -u`
echo "Begin gather at "$Dte >> $fnm

#if [[ ! -d $DSP_PROD_DIR ]]
#then
# echo "The DSP PROD DIR $DSP_PROD_DIR is invalid or " >> $fnm
# echo "does not exist. Exiting DHRgather script. " $Dte >> $fnm
# exit 1
#fi

#
# touch dspgather control temp file
#

touch $DSP_LOG_DIR/dspgather_control.temp
touch /tmp/dhrgather_control.temp

#
# check if radarloc table has any radars with use_radar="T"
# if not, then stop
# the sed command strips off all non-numeric characters
#

Num_Rad=$(
echo "select count(*) from Radarloc WHERE use_radar='T'; " | \
$POSTGRESQLBINDIR/psql -q -t $DB_NAME
)

if [[ $Num_Rad = 0 ]]
then

    echo no radars defined for use -- gather not done >> $fnm
    Dte=`date -u`
    echo "End gather at "$Dte >> $fnm
    exit 1

elif [[ -z $Num_Rad ]]
then

    echo number of radars defined for use is undetermined -- gather continuing >> $fnm
fi

#
# write radar identifiers and prefixes to temp file
#


#$POSTGRESQLBINDIR/psql -q -t $DB_NAME
#unload to "$DSP_LOG_DIR/radid.temp"
#select lower(radid_prefix), lower(radid)
#from Radarloc
#WHERE use_radar='T';

echo "select lower(radid_prefix), lower(radid) from Radarloc WHERE use_radar='T';" | \
$POSTGRESQLBINDIR/psql -q -A -t $DB_NAME > $DSP_LOG_DIR/radid.temp

#
# if radid.temp is not created or is 0 bytes in length, then print message and exit gather script
#

if [[ ! -s $DSP_LOG_DIR/radid.temp ]]
then
    rm -f $DSP_LOG_DIR/radid.temp
    rm -f $DSP_LOG_DIR/dspgather_control.temp
    echo "radid.temp file not created properly -- gather not done" >> $fnm

    Dte=`date -u`
    echo "End gather at "$Dte >> $fnm
    exit 1
fi

#
# for each radar id in file, construct directory name and search for products
# note that the radar id in the directory name is lower case and 4 char
#
#echo "DSP_DIRNAME1 = $DSP_DIRNAME1" >> $fnm
#echo "DSP_DIRNAME2 = $DSP_DIRNAME2" >> $fnm
#for rid in `cat $DSP_LOG_DIR/radid.temp`
#do
#radid_dir=$(echo "$rid" | sed 's/|//g')
#radid=${radid_dir#?}
#PRODUCT_DIR_NAME=$DSP_DIRNAME1/$radid_dir/$DSP_DIRNAME2
#echo "PRODUCT_DIR_NAME = $PRODUCT_DIR_NAME radid dir=$radid_dir"

#
# look for raw products in the directory satisfying the time test
# if found, then copy to a temporary location and mv to the gather directory
# change radar id to upper case
# successfully processed products will be deleted from the gather directory as part of
# the decoding process
# write old name, new name to log file
#

#num_files=`ls -F $PRODUCT_DIR_NAME 2>>$fnm | grep -v "*" | grep -v "/" | wc -l`
#echo "num_files = $num_files"
#if test $num_files -ne 0
#then
# RADID=$(print $radid | tr '[a-z]' '[A-Z]')
# i=0
# for pname in `find $PRODUCT_DIR_NAME -type f -name '*' -newer $DSP_LOG_DIR/dspgather_control`

for pname in `find $DSP_PROD_DIR -type f -name '*' -newer $DSP_LOG_DIR/dspgather_control`
do

    # GATHER_FILENAME=$DSP_PROD_DIR/DSP$RADID.`date -u +%Y%m%d%H%M%S.$i`
    # echo "GATHER_FILENAME = $GATHER_FILENAME"
    # i=`expr $i + 1`
    # cp $pname $DSP_LOG_DIR/radar_temp
    # mv $DSP_LOG_DIR/radar_temp $GATHER_FILENAME
    # echo $pname "copied to" $GATHER_FILENAME >> $fnm

    echo $pname "copied to" $DSP_PROD_DIR >> $fnm
done

#fi
#done
#
# mv dspgather control file
#

mv $DSP_LOG_DIR/dspgather_control.temp $DSP_LOG_DIR/dspgather_control

#
# delete radid.temp file
#

rm $DSP_LOG_DIR/radid.temp

#
# end write to log
#

Dte=`date -u`
echo "End gather at "$Dte >> $fnm

exit 0
@@ -1,120 +0,0 @@
#!/bin/ksh
#
# This script retrieves Digital Hybrid Reflectivity (DHR)
# radar products from the temporary data gathering directory
# and sends them to the decode_dhr_dsp executable for decoding.
#
# Adapted from the Run_DecodeDPA script
# David T. Miller RSIS OHD/HSEB
# October 30, 2007

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

. $RUN_FROM_DIR/../../set_hydro_env


DHRPRODDIR=`get_apps_defaults dhr_prod_dir`
DHRLOGDIR=`get_apps_defaults dhr_log_dir`

#
# set up log filename
#

Dte=`date +%m%d%Y`
fnm=$DHRLOGDIR/decodedhr_log_$Dte
ctlfnm=$DHRLOGDIR/decodedhr_ctl

echo "fnm is $fnm"

test -e $DHRLOGDIR/decodedhr_log_$Dte
FOUND=$?
if test FOUND -eq 1
then
    touch $DHRLOGDIR/decodedhr_log_$Dte
fi

test -e $DHRLOGDIR/decodedhr_ctl
FOUND=$?
if test FOUND -eq 1
then
    touch $DHRLOGDIR/decodedhr_ctl
fi
#----------------------------------------------------------------------
# Don't fire up another instance if one is underway

AMIRUNNING_DIR=$(get_apps_defaults dhr_log_dir)
. $RFCLX/public/bin/amirunning

if [[ $AmIRunning = "yes" ]]; then
    echo Run_DecodeDHR cancelled due to other instance running >> $fnm
    exit 1
fi

# Check to see if age on file is more than 1 minute
runfreq=$((60))
tnow=`date +%s`
tfile=`date +%s -r $ctlfnm`
agesecs=$(($tnow-$tfile))
if [[ $agesecs -lt $runfreq ]]
then
    exit 1
else
    touch $ctlfnm
fi

#----------------------------------------------------------------------

#
# define delete messages written to log file
#

delete_message=" product deleted"
no_delete_message=" product not deleted"

#
# check for products in the gather directory
# if found, then decode
# note that products in the gather dir will not be decoded if they have "x"
# permission
#
echo "Log file: " $fnm

for pname in `ls -1F $DHRPRODDIR | grep -v "*" | grep -v "/" `
do
    runso rary.ohd.pproc decode_dhr_dsp_main $DHRPRODDIR/$pname >> $fnm

    #
    # return codes from decode_dhr_dsp
    #
    # 0 -- valid product (precip > 0 or supplemental message = "no precip detected")
    # 1 -- valid product, no decoded file created
    #      (product not top-of-hour, supplemental message = "bad rate scan" etc)
    # 2 -- invalid product
    #      (loss of data, unexpected EOF, invalid date or time, etc)
    # 3 -- problem outside of product
    #      (error opening Informix db, error opening product)
    # 4 -- radar not in radarloc table OR use_radar=F

    fstat=$?
    if test $fstat -lt 3
    then
        rm -f $DHRPRODDIR/$pname
    fi

    if test $fstat -eq 2
    then
        echo "$delete_message" >> $fnm
    elif test $fstat -eq 3
    then
        echo "$no_delete_message" >> $fnm
    fi

    if test $fstat -eq 4
    then
        rm -f $DHRPRODDIR/$pname
        echo "$delete_message" >> $fnm
    fi

done
exit 0
@@ -1,90 +0,0 @@
#!/bin/ksh
#
# this script gathers the DPA (Digital Precip Array) radar products
# into the proper directory and decodes them
#
# the decodedpa executable decodes the products
#
# a decoded radar product is a 131x131 array of REAL*4 values where each value
# represents the average hourly rainfall at the center of each bin
#
# beginning in OB3, the decoded radar products are in Little Endian format
#

RUN_FROM_DIR=`dirname $0`

. $RUN_FROM_DIR/../../set_hydro_env


DPAPRODDIR=`get_apps_defaults dpa_prod_dir`
DPALOGDIR=`get_apps_defaults dpa_log_dir`
BIN_DIR=`get_apps_defaults pproc_bin`

#
# set up log filename
#

Dte=`date +%m%d%Y`
fnm=$DPALOGDIR/decodedpa_log_$Dte

#
# define delete messages written to log file
#

delete_message=" product deleted"
no_delete_message=" product not deleted"
file=$1

if [[ -e $file.LCK ]]
then
    return
else
    `touch $file.LCK`
fi

runso rary.ohd.pproc decode_dpa_main $1 >> $fnm

#
# return codes from decodedpa
#
# 0 -- valid product (precip > 0 or supplemental message = "no precip detected")
# 1 -- valid product, no decoded file created
#      (product not top-of-hour, supplemental message = "bad rate scan" etc)
# 2 -- invalid product
#      (loss of data, unexpected EOF, invalid date or time, etc)
# 3 -- problem outside of product
#      (error opening database, error opening product)
# 4 -- radar not in radarloc table OR use_radar=F
# 5 -- problem converting raw product from Big Endian to Little Endian format
#      (Linux only)

fstat=$?
# echo "decode dpa return code for file: $1 " $fstat >> $fnm
if test $fstat -lt 3
then
    rm -f $file
    rm -f $file.LE
fi

if test $fstat -eq 2
then
    echo "$delete_message" >> $fnm
elif test $fstat -eq 3
then
    echo "$no_delete_message" >> $fnm
fi

if test $fstat -eq 4
then
    rm -f $file
    rm -f $file.LE
    echo "$delete_message" >> $fnm
fi
if test $fstat -eq 5
then
    rm -f $file
    rm -f $file.LE
    echo "$delete_message" >> $fnm
fi
`rm $file.LCK`
exit $fstat
@@ -1,122 +0,0 @@
#!/bin/ksh
#
# This script retrieves Digital Storm total Precipitation
# (DSP) radar products from the temporary data gathering directory
# and sends them to the decode_dhr_dsp executable for decoding.
#
# Adapted from the Run_DecodeDPA script
# David T. Miller RSIS OHD/HSEB
# October 30, 2007


# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

. $RUN_FROM_DIR/../../set_hydro_env


export DSPPRODDIR=`get_apps_defaults dsp_prod_dir`
export DSPLOGDIR=`get_apps_defaults dsp_log_dir`

#
# set up log filename
#

Dte=`date +%m%d%Y`
fnm=$DSPLOGDIR/decodedsp_log_$Dte
ctlfnm=$DSPLOGDIR/decodedsp_ctl

test -e $DSPLOGDIR/decodedsp_log_$Dte
FOUND=$?
if test FOUND -eq 1
then
    touch $DSPLOGDIR/decodedsp_log_$Dte
fi

test -e $DSPLOGDIR/decodedsp_ctl
FOUND=$?
if test FOUND -eq 1
then
    touch $DSPLOGDIR/decodedsp_ctl
fi
#----------------------------------------------------------------------
# Don't fire up another instance if one is underway

AMIRUNNING_DIR=$(get_apps_defaults dsp_log_dir)

. $RFCLX/public/bin/amirunning

if [[ $AmIRunning = "yes" ]]; then
    echo Run_DecodeDSP cancelled due to other instance running >> $fnm
    exit 1
fi

# Check to see if age on file is more than 1 minute
runfreq=$((60))
tnow=`date +%s`
tfile=`date +%s -r $ctlfnm`
agesecs=$(($tnow-$tfile))
if [[ $agesecs -lt $runfreq ]]
then
    exit 1
else
    touch $ctlfnm
fi

#----------------------------------------------------------------------
#
# define delete messages written to log file
#

delete_message=" product deleted"
no_delete_message=" product not deleted"

#
# check for products in the gather directory
# if found, then decode
# note that products in the gather dir will not be decoded if they have "x"
# permission
#
echo "Log file: " $fnm

for pname in `ls -1F $DSPPRODDIR | grep -v "*" | grep -v "/" `
do
    echo $pname
    runso rary.ohd.pproc decode_dhr_dsp_main $DSPPRODDIR/$pname >> $fnm




    #
    # return codes from decode_dhr_dsp
    #
    # 0 -- valid product (precip > 0 or supplemental message = "no precip detected")
    # 1 -- valid product, no decoded file created
    #      (product not top-of-hour, supplemental message = "bad rate scan" etc)
    # 2 -- invalid product
    #      (loss of data, unexpected EOF, invalid date or time, etc)
    # 3 -- problem outside of product
    #      (error opening Informix db, error opening product)
    # 4 -- radar not in radarloc table OR use_radar=F

    fstat=$?
    if test $fstat -lt 3
    then
        rm -f $DSPPRODDIR/$pname
    fi

    if test $fstat -eq 2
    then
        echo "$delete_message" >> $fnm
    elif test $fstat -eq 3
    then
        echo "$no_delete_message" >> $fnm
    fi

    if test $fstat -eq 4
    then
        rm -f $DSPPRODDIR/$pname
        echo "$delete_message" >> $fnm
    fi

done
exit 0
Binary file not shown.
Binary file not shown.
@@ -1,33 +0,0 @@
-- gather distinct lid,ts combinations from Location and Ingestfilter tables

select l.lid, i.ts
into temp temptable
from location l, ingestfilter i
where l.lid = i.lid
and l.lat is not null
and l.lon is not null
and i.pe in ('PP', 'PC')
and substr(i.ts, 1, 1) = 'R'
and i.stg2_input = 'T'
group by 1, 2
order by 1, 2;


-- add info from location table for the lid

select
l.lid,
'PPH'||t.ts||'ZZ',
to_char(l.lat,'99.99'),
to_char(l.lon,'999.99'),
case
when l.elev >= 0 then to_char(l.elev,'99999')
else ' -999'
end,
'9',
case
when l.name ISNULL then 'XXXXXXXXXX'
else l.name
end
from temptable t, location l
where t.lid=l.lid;
@@ -1,10 +0,0 @@
#!/bin/ksh

RUN_FROM_DIR=`dirname $0`

. $RUN_FROM_DIR/../../set_hydro_env

export PPROC_BIN=$(get_apps_defaults pproc_bin)

nohup ksh $PPROC_BIN/start_hpe >/dev/null 2>&1 &
exit 0
@ -1,43 +0,0 @@
|
|||
-- gather distinct lid,ts combinations from HourlyPP, HourlyPC and DailyPP tables
|
||||
-- do not include ts = P*
|
||||
|
||||
select lid, ts
|
||||
into temp temptable
|
||||
from hourlypp
|
||||
where substr(ts, 1, 1) != 'P'
|
||||
group by 1,2
|
||||
|
||||
union
|
||||
|
||||
select lid, ts
|
||||
from hourlypc
|
||||
where substr(ts, 1, 1) != 'P'
|
||||
group by 1,2
|
||||
|
||||
union
|
||||
|
||||
select lid, ts
|
||||
from dailypp
|
||||
where substr(ts, 1, 1) != 'P'
|
||||
group by 1,2
|
||||
;
|
||||
|
||||
-- add info from location table for the lid
|
||||
|
||||
select
|
||||
l.lid,
|
||||
'PPD'||t.ts||'ZZ',
|
||||
to_char(l.lat,'99.99'),
|
||||
to_char(l.lon,'999.99'),
|
||||
case
|
||||
when l.elev >= 0 then to_char(l.elev,'99999')
|
||||
else ' -999'
|
||||
end,
|
||||
'9',
|
||||
case
|
||||
when l.name ISNULL then 'XXXXXXXXXX'
|
||||
else l.name
|
||||
end
|
||||
from temptable t, location l
|
||||
where t.lid=l.lid
|
||||
order by l.lid asc;
|
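For illustration only, a hypothetical location row (lid DMX, ts RG, lat 41.99, lon 93.62, elev 294, name DES MOINES) would come out of the final select roughly as the fixed-width record below; the exact padding and column separators depend on the SQL client:

    DMX  PPDRGZZ   41.99   93.62    294  9  DES MOINES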
Binary file not shown.
@ -1,103 +0,0 @@
#!/bin/ksh
#
# process_dpa
#
# October 10, 2000
# run script for process dpa
# processes single file by decoding it
#
# Modified: Oct 29, 2001 Russ added OS_SUFFIX logic
#           Nov 5, 2002  Mark removed read_dpainfo, bld_latest
#           Sep 02, 2003 paul added delete of ".LE" (Little Endian) files
#
# check the input args
# currently, the bin_dir argument is not used.

USAGE="process_dpa input_data_file output_log_file err_dir bin_dir"

if [ $# -ne 4 ]
then
   print -u2 "\nUSAGE: $USAGE\n"
   exit 3
fi

# setup file names
# note: the bin directory arg is not used currently

DATA_FNM=$1
LOG_FNM=$2
ERR_DIR=$3

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up some environment variables for AWIPS Team applications
. $RUN_FROM_DIR/../../set_hydro_env
export PPROC_BIN=$(get_apps_defaults pproc_bin)

# echo the current time
#
DTZ=`date -u +%a@%H:%M:%S`
echo "Decoding product: " $DTZ >> $LOG_FNM

# now run the decoder *************************************************
# get and save the return status for later use.

$PPROC_BIN/decodedpa$OS_SUFFIX $DATA_FNM >> $LOG_FNM 2>>$LOG_FNM
#echo Awaiting proper decodedpa return codes >> $LOG_FNM
DECODE_STATUS=$?

#
# now dispose of the input file and continue accordingly,
# depending on the status of the decoding operation.
#

if [ $DECODE_STATUS -eq 0 ]
then
   echo Decode status=$DECODE_STATUS ok, removing file. >> $LOG_FNM
   rm -f $DATA_FNM >> $LOG_FNM 2>>$LOG_FNM
   rm -f $DATA_FNM.LE >> $LOG_FNM 2>>$LOG_FNM

elif [ $DECODE_STATUS -eq 1 ]
then
   echo Decode status=$DECODE_STATUS data unavailable, removing file. >> $LOG_FNM
   rm -f $DATA_FNM >> $LOG_FNM 2>>$LOG_FNM
   rm -f $DATA_FNM.LE >> $LOG_FNM 2>>$LOG_FNM
   exit

elif [ $DECODE_STATUS -eq 2 ]
then
   echo Decode status=$DECODE_STATUS decode error, moving file to error dir. >> $LOG_FNM
   mv -f $DATA_FNM $ERR_DIR >> $LOG_FNM 2>>$LOG_FNM
   mv -f $DATA_FNM.LE $ERR_DIR >> $LOG_FNM 2>>$LOG_FNM
   exit

elif [ $DECODE_STATUS -eq 3 ]
then
   echo Decode status=$DECODE_STATUS processing error, moving file to error dir. >> $LOG_FNM
   mv -f $DATA_FNM $ERR_DIR >> $LOG_FNM 2>>$LOG_FNM
   exit

elif [ $DECODE_STATUS -eq 4 ]
then
   echo Decode status=$DECODE_STATUS radar undefined or inactive, removing file. >> $LOG_FNM
   rm -f $DATA_FNM >> $LOG_FNM 2>>$LOG_FNM
   exit

elif [ $DECODE_STATUS -eq 5 ]
then
   echo Decode status=$DECODE_STATUS error converting raw product from Big Endian to Little Endian format. >> $LOG_FNM
   rm -f $DATA_FNM >> $LOG_FNM 2>>$LOG_FNM
   exit
fi

#
DTZ=`date -u +%a@%H:%M:%S`
echo "Completed job at: " $DTZ >> $LOG_FNM

exit
#
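A hypothetical invocation matching the USAGE string above (all four paths are illustrative only; the fourth argument is accepted but unused):

    process_dpa /data/dpa_gather/DPATLX.20080101.1200 \
                /awips/hydroapps/precip_proc/local/data/log/dpa/decode.log \
                /awips/hydroapps/precip_proc/local/data/dpa_error \
                /awips/hydroapps/precip_proc/bin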
@ -1,152 +0,0 @@
#!/bin/ksh
#
# process_dpafiles
#
# May 10, 2002
# script for processing dpa products for the WHFS.
#
# the script takes the input data directory,
# output log directory, and bin directory as arguments.
# it is possible that the calling script may be redirecting
# all standard output from this script to the same log file.
#
# Modified: Nov 5, 2002  Mark removed update_latestFFG
#           Aug 15, 2003 Paul - replaced "test -r" with "ls -1F ..."
#

# check for input args
#

USAGE="process_dpafiles input_data_dir output_log_dir output_error_dir bin_dir1 bin_dir2"
if [ $# -ne 5 ]
then
   print -u2 "\nUSAGE: $USAGE\n"
   exit 1
fi

# set some local variables
# the bin_dir1 arg is for the precip_proc/bin dir
# the bin_dir2 arg is for the whfs/bin dir, which is currently not used

DATA_DIR=$1
LOG_DIR=$2
ERR_DIR=$3
BIN_DIR1=$4
BIN_DIR2=$5

# define the log file name

LOG_FILE=$LOG_DIR/`date -u +process_dpa_log_%m%d`

echo "Data directory: " $DATA_DIR >> $LOG_FILE
echo "Log directory: " $LOG_DIR >> $LOG_FILE
echo "Error directory:" $ERR_DIR >> $LOG_FILE
echo "Bin directory:" $BIN_DIR1 >> $LOG_FILE

#
# define functions for checking for a stop file and
# logging a message. note that for the
# shell function "test", a true condition returns 0.

checkDPAstop()
{
   STOP_DIR=$1
   test -r $STOP_DIR/stop_dpadecode
   FILES_FOUND=$?
   if test $FILES_FOUND -eq 0
   then
      return 1
   else
      return 0
   fi
}

logDPAstop()
{
   LOG_FILE=$1

   DTZ=`date -u`
   echo "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" >> $LOG_FILE
   echo "Terminating process." >> $LOG_FILE
   echo "Stopfile found at:" $DTZ >> $LOG_FILE
   echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" >> $LOG_FILE

   return 0
}

#
# enter an infinite loop. for each pass thru main loop,
# look for input files and process them individually.
# check for stop file after each loop and after any
# data file that may be processed.
# after the raw input file is processed, the called
# processing script handles the file (either deletes or moves it)
#

let loop_cnt=0

while test 1=1
do

   # define the log file name

   LOG_FILE=$LOG_DIR/`date -u +process_dpa_log_%m%d`

   #
   # this script also performs a secondary function of
   # creating the mosaicked FFG grids, and computing areal FFG values
   # via the gen_areal_ffg processes.
   #

   let loop_cnt=loop_cnt+1
   if ((loop_cnt > 12))
   then
      DTZ=`date -u +%a@%H:%M:%S`
      echo Running gen_areal_ffg. $DTZ >> $LOG_FILE
      $BIN_DIR1/run_gen_areal_ffg
      let loop_cnt=0
   fi

   checkDPAstop $DATA_DIR
   STOP_FOUND=$?

   if test $STOP_FOUND -eq 1
   then
      logDPAstop $LOG_FILE
      exit
   fi

   FILES_FOUND=`ls -F $DATA_DIR | grep -v "*" | grep -v "/" | wc -l`
   if test $FILES_FOUND -ne 0
   then
      for DATA_FILE in `ls $DATA_DIR`
      do

         checkDPAstop $DATA_DIR
         STOP_FOUND=$?
         if test $STOP_FOUND -eq 1
         then
            logDPAstop $LOG_FILE
            exit
         fi

         #name=`basename $DATA_FILE`
         echo "----------------" >> $LOG_FILE
         echo Processing $DATA_DIR/$DATA_FILE >> $LOG_FILE
         $BIN_DIR1/process_dpa $DATA_DIR/$DATA_FILE $LOG_FILE $ERR_DIR $BIN_DIR1 2>>$LOG_FILE

      done

   else
      DTZ=`date -u +%a@%H:%M:%S`
      echo "No files awaiting decoding at:" $DTZ >> $LOG_FILE
      sleep 45
   fi

done
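Since checkDPAstop above only tests whether $DATA_DIR/stop_dpadecode exists, the decode loop can be stopped and re-armed from outside, for example:

    touch $DATA_DIR/stop_dpadecode   # loop logs the stop and exits after the current product
    rm -f $DATA_DIR/stop_dpadecode   # clear the flag before the next start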
@ -1,165 +0,0 @@
#!/bin/ksh
# process_grib_files

# This script is called from MPE to create grib format files. If the
# mpe_d2d_display_grib token is set to "ON", then the grib file is copied to
# another directory and the notif_mpe.csh script is run. This script
# notifies the grib decoder that a file is available for decoding into netCDF
# format. The netCDF file will then be made available for display in D2D.

# Export the value of FXA_HOME here. This allows read_env.sh to be
# sourced before set_hydro_env. This prevents set_hydro_env environment
# and token settings from being overwritten by read_env.sh.
# This has been added to allow the D2D log dir variable to be defined.
#
# An addition for OB7.2, this script will create a separate set of
# GRIB files in the qpe_grib_sbn directory which have the sub center
# code set to the id of the sending office. GRIB files created in the
# standard qpe_grib directory have the sub center code set to 0.
#
# export FXA_HOME=/awips/fxa

# Set up the AWIPS environment.
# . $FXA_HOME/readenv.sh

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env

export grib_bin_dir=$(get_apps_defaults pproc_bin)
export grib_out_dir=$(get_apps_defaults mpe_grib_dir)
export grib_in_dir=$(get_apps_defaults rfcwide_xmrg_dir)
export MPE_SEND_QPE_TO_SBN=$(get_apps_defaults mpe_send_qpe_to_sbn)
export MPE_QPE_SBN_DIR=$(get_apps_defaults mpe_qpe_sbn_dir)
export MPE_QPE_GRIB_SBN_DIR=$(get_apps_defaults mpe_qpe_grib_sbn_dir)
export MPE_LOG_DIR=$(get_apps_defaults rfcwide_logs_dir)
export RFCWIDE_OUTPUT_DIR=$(get_apps_defaults rfcwide_output_dir)

#
# Define input file name and output file name
# in_file=xmrg file name
# out_file=grib file name
#

in_file=$1
out_file=$2
#
# Create log file name
#
griblog=$MPE_LOG_DIR/process_grib_files.log
tempname=$MPE_LOG_DIR/process_grib_files.tmp
#
#save only the latest in the log file
#
echo "-------------------------------" >> $griblog

tail -1200 $griblog > $tempname
mv $tempname $griblog

Dte=`date -u`
echo Starting process_grib_files at $Dte >> $griblog

echo Input xmrg file $grib_in_dir/$in_file >> $griblog
echo Output grib file $grib_out_dir/$out_file >> $griblog

#
# Run gribit to generate grib format file
# input to gribit is xmrg format file, output from gribit is grib format file
# grib_bin_dir is defined in set_hydro_env
#

export grib_in_file=$in_file
export grib_out_file=$out_file

#
# Force the subcenter code to 0 in the created GRIB message.
#
export grib_set_subcenter_0=on

Dte=`date -u`
echo Invoking $grib_bin_dir/gribit at $Dte >> $griblog

$grib_bin_dir/gribit.LX

#
# At RFCs, a second version of the grib file needs to be created for
# transmission over the SBN. This version of the GRIB file will contain
# the code of the sending office.
#

echo "The value of the mpe_send_qpe_to_sbn token is $MPE_SEND_QPE_TO_SBN" >> $griblog

if [[ $MPE_SEND_QPE_TO_SBN = "ON" || $MPE_SEND_QPE_TO_SBN = "on" ]]
then

   echo "Producing GRIB file for transmission over the SBN." >> $griblog
   #
   # Build the path to the qpe SBN grib directory.
   #
   export grib_in_dir=$MPE_QPE_SBN_DIR
   export grib_out_dir=$MPE_QPE_GRIB_SBN_DIR
   #
   # This is important. Must make sure the sub_center code is set to the
   # sending RFC.
   export grib_set_subcenter_0=off
   echo "The subcenter code will be set to represent the sending office." >> $griblog
   echo Input xmrg file $grib_in_dir/$in_file >> $griblog
   echo Output grib file $grib_out_dir/$out_file >> $griblog

   #
   # Call gribit
   #
   Dte=`date -u`
   echo Invoking $grib_bin_dir/gribit at $Dte >> $griblog
   $grib_bin_dir/gribit.LX

fi

#
# If token set to ON, then
# (1) Copy grib file to temp file
# (2) mv temp file to $d2d_grib_dir to ensure that file is complete
# (3) Rename file by adding DDHHMMSS of creation time to name
# (4) Run script notif_mpe.csh
#

D2D_DISPLAY=$(get_apps_defaults mpe_d2d_display_grib)

echo Token mpe_d2d_display_grib is defined as $D2D_DISPLAY >> $griblog

if [[ $D2D_DISPLAY = "ON" || $D2D_DISPLAY = "on" ]]; then

   new_string=`date -u +%d%H%M%S`
   new_file_name=ZETA98_${out_file%.*}_$new_string.grib

   INPUT_DIR=$(get_apps_defaults d2d_input_dir)

   echo Copy grib file $RFCWIDE_OUTPUT_DIR/qpe_grib/$out_file >> $griblog
   echo to $RFCWIDE_OUTPUT_DIR/qpe_grib/gribfile.tmp >> $griblog
   cp $RFCWIDE_OUTPUT_DIR/qpe_grib/$out_file $RFCWIDE_OUTPUT_DIR/qpe_grib/gribfile.tmp >> $griblog 2>&1

   echo Move and rename grib file $RFCWIDE_OUTPUT_DIR/qpe_grib/gribfile.tmp >> $griblog
   echo to $INPUT_DIR/$new_file_name >> $griblog
   mv $RFCWIDE_OUTPUT_DIR/qpe_grib/gribfile.tmp $INPUT_DIR/$new_file_name >> $griblog 2>&1

#  echo Invoking $FXA_BIN_DIR/notif_mpe.csh >> $griblog
#  $FXA_BIN_DIR/notif_mpe.csh

else
   echo Grib files are not converted to netCDF files to display on D2D >> $griblog

fi

#
# If token set to ON, then send file to NPVU
#

#SEND_GRIB=$(get_apps_defaults mpe_send_grib)
#if [[ $SEND_GRIB = "ON" || $SEND_GRIB = "on" ]]; then

#fi

Dte=`date -u`
echo Completed process_grib_files at $Dte >> $griblog
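The two tokens that steer this script come from get_apps_defaults; a sketch of explicit site settings, assuming the usual token : value layout of an .Apps_defaults site file:

    mpe_d2d_display_grib : ON    # copy each QPE GRIB file into the D2D ingest directory
    mpe_send_qpe_to_sbn  : OFF   # RFCs set ON to also build the SBN GRIB with the office subcenter code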
@ -1,280 +0,0 @@
#!/bin/ksh
# process_hpe_grib_files

# This script is called to create grib format files for the
# High-resolution Precipitation Estimator (HPE). These
# are local xmrg mosaic files and will be converted to AWIPS GRIB1
# format for conversion into AWIPS netCDF and display in D2D. If the
# *_d2d_display_grib token is set to "ON", then the grib file is copied to
# another directory and the notif_mpe.csh script is run. This script
# notifies the grib decoder that a file is available for decoding into netCDF
# format. The netCDF file will then be made available for display in D2D.
#
# David T. Miller RSIS OHD/HSEB
# October 30, 2007

#

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env

export grib_bin_dir=$(get_apps_defaults pproc_bin)
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)

in_file=$1
out_file=$2
process=$3
#
# Create log file name
#

griblog=$HPE_LOG_DIR/process_grib_files.log
tempname=$HPE_LOG_DIR/process_grib_files.tmp

#
#save only the latest in the log file
#
echo "-------------------------------" >> $griblog

tail -1200 $griblog > $tempname
mv $tempname $griblog

Dte=`date -u`
echo Starting process_hpe_grib_files at $Dte >> $griblog

if [[ $process = DHR* ]]
then
   export HPE_DHRMOSAIC_DIR=$(get_apps_defaults hpe_dhrmosaic_dir)
   export HPE_DHRMOSAIC_GRIB_DIR=$(get_apps_defaults hpe_dhrmosaic_grib_dir)
   echo $HPE_DHRMOSAIC_DIR >> $griblog
   echo $HPE_DHRMOSAIC_GRIB_DIR >> $griblog
   export grib_in_dir=$HPE_DHRMOSAIC_DIR
   export grib_out_dir=$HPE_DHRMOSAIC_GRIB_DIR
   echo Input xmrg file $HPE_DHRMOSAIC_DIR/$in_file >> $griblog
   echo Output grib file $HPE_DHRMOSAIC_GRIB_DIR/$out_file >> $griblog
elif [[ $process = BDHR* ]]
then
   export HPE_BDHRMOSAIC_DIR=$(get_apps_defaults hpe_bdhrmosaic_dir)
   export HPE_BDHRMOSAIC_GRIB_DIR=$(get_apps_defaults hpe_bdhrmosaic_grib_dir)
   echo $HPE_BDHRMOSAIC_DIR >> $griblog
   echo $HPE_BDHRMOSAIC_GRIB_DIR >> $griblog
   export grib_in_dir=$HPE_BDHRMOSAIC_DIR
   export grib_out_dir=$HPE_BDHRMOSAIC_GRIB_DIR
   echo Input xmrg file $HPE_BDHRMOSAIC_DIR/$in_file >> $griblog
   echo Output grib file $HPE_BDHRMOSAIC_GRIB_DIR/$out_file >> $griblog
elif [[ $process = BDSP* ]]
then
   export HPE_EBMOSAIC_DIR=$(get_apps_defaults hpe_ebmosaic_dir)
   export HPE_EBMOSAIC_GRIB_DIR=$(get_apps_defaults hpe_ebmosaic_grib_dir)
   echo $HPE_EBMOSAIC_DIR >> $griblog
   echo $HPE_EBMOSAIC_GRIB_DIR >> $griblog
   export grib_in_dir=$HPE_EBMOSAIC_DIR
   export grib_out_dir=$HPE_EBMOSAIC_GRIB_DIR
   echo Input xmrg file $HPE_EBMOSAIC_DIR/$in_file >> $griblog
   echo Output grib file $HPE_EBMOSAIC_GRIB_DIR/$out_file >> $griblog
elif [[ $process = DSP* ]]
then
   if [[ $in_file = ERMOSAIC*z ]]
   then
      export HPE_ERMOSAIC_DIR=$(get_apps_defaults hpe_ermosaic_dir)
   elif [[ $in_file = MAXRMOSAIC*z ]]
   then
      export HPE_ERMOSAIC_DIR=$(get_apps_defaults hpe_max_ermosaic_dir)
   elif [[ $in_file = AVGRMOSAIC*z ]]
   then
      export HPE_ERMOSAIC_DIR=$(get_apps_defaults hpe_avg_ermosaic_dir)
   else
      echo "Incorrect RMOSAIC filename. Should be ER, AVGR, or MAXRMOSAIC " >> $griblog
      exit 1
   fi
   export HPE_ERMOSAIC_GRIB_DIR=$(get_apps_defaults hpe_ermosaic_grib_dir)
   echo $HPE_ERMOSAIC_DIR >> $griblog
   echo $HPE_ERMOSAIC_GRIB_DIR >> $griblog
   export grib_in_dir=$HPE_ERMOSAIC_DIR
   export grib_out_dir=$HPE_ERMOSAIC_GRIB_DIR
   echo Input xmrg file $HPE_ERMOSAIC_DIR/$in_file >> $griblog
   echo Output grib file $HPE_ERMOSAIC_GRIB_DIR/$out_file >> $griblog
elif [[ $process = BTP* ]]
then
   echo "Nowcast process flag passed to script. $process - exiting..." >> $griblog
   exit 1
elif [[ $process = BPRT* ]]
then
   echo "Nowcast process flag passed to script. $process - exiting..." >> $griblog
   exit 1
else
   echo "Unknown process flag passed to script. exiting..." >> $griblog
   exit 1
fi

#
# Run gribit to generate grib format file
# input to gribit is xmrg format file, output from gribit is grib format file
# grib_bin_dir is defined in set_hydro_env
#

export grib_in_file=$in_file
export grib_out_file=$out_file

#
# Force the subcenter code to 0 in the created GRIB message.
#
export grib_set_subcenter_0=on

Dte=`date -u`
echo Invoking $grib_bin_dir/gribit at $Dte >> $griblog

$grib_bin_dir/gribit.LX >> $griblog

#runso rary.ohd.pproc gribit_main_main_

if [[ $process = DHR* ]]
then
   D2D_DISPLAY=$(get_apps_defaults dhrmosaic_d2d_display_grib)

   echo Token dhrmosaic_d2d_display_grib is defined as $D2D_DISPLAY >> $griblog

   if [[ $D2D_DISPLAY = "ON" || $D2D_DISPLAY = "on" ]]; then

      new_string=`date -u +%d%H%M%S`
      new_file_name=ZETA98_${out_file%.*}_$new_string.grib

      INPUT_DIR=$(get_apps_defaults d2d_input_dir)

      echo Copy grib file $HPE_DHRMOSAIC_GRIB_DIR/$out_file >> $griblog
      echo to $HPE_DHRMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
      cp $HPE_DHRMOSAIC_GRIB_DIR/$out_file $HPE_DHRMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog 2>&1

      if [[ -s $HPE_DHRMOSAIC_GRIB_DIR/$out_file ]]
      then
         echo Move and rename grib file $HPE_DHRMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
         echo to $INPUT_DIR/$new_file_name >> $griblog
         mv $HPE_DHRMOSAIC_GRIB_DIR/gribfile.tmp $INPUT_DIR/$new_file_name >> $griblog 2>&1
      else
         echo Not moving grib file, zero bytes, removing temp file. >> $griblog
         rm $HPE_DHRMOSAIC_GRIB_DIR/gribfile.tmp
      fi

   else
      echo DHR Mosaic Grib files were not created for display on D2D >> $griblog

   fi
elif [[ $process = BDHR* ]]
then
   D2D_DISPLAY=$(get_apps_defaults bdhrmosaic_d2d_display_grib)

   echo Token bdhrmosaic_d2d_display_grib is defined as $D2D_DISPLAY >> $griblog

   if [[ $D2D_DISPLAY = "ON" || $D2D_DISPLAY = "on" ]]; then

      new_string=`date -u +%d%H%M%S`
      new_file_name=ZETA98_${out_file%.*}_$new_string.grib

      INPUT_DIR=$(get_apps_defaults d2d_input_dir)

      echo Copy grib file $HPE_BDHRMOSAIC_GRIB_DIR/$out_file >> $griblog
      echo to $HPE_BDHRMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
      cp $HPE_BDHRMOSAIC_GRIB_DIR/$out_file $HPE_BDHRMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog 2>&1

      if [[ -s $HPE_BDHRMOSAIC_GRIB_DIR/$out_file ]]
      then
         echo Move and rename grib file $HPE_BDHRMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
         echo to $INPUT_DIR/$new_file_name >> $griblog
         mv $HPE_BDHRMOSAIC_GRIB_DIR/gribfile.tmp $INPUT_DIR/$new_file_name >> $griblog 2>&1
      else
         echo Not moving grib file, zero bytes, removing temp file. >> $griblog
         rm $HPE_BDHRMOSAIC_GRIB_DIR/gribfile.tmp
      fi

   else
      echo BDHR Mosaic Grib files were not created for display on D2D >> $griblog

   fi
elif [[ $process = BDSP* ]]
then
   D2D_DISPLAY=$(get_apps_defaults ebmosaic_d2d_display_grib)

   echo Token ebmosaic_d2d_display_grib is defined as $D2D_DISPLAY >> $griblog

   if [[ $D2D_DISPLAY = "ON" || $D2D_DISPLAY = "on" ]]; then

      new_string=`date -u +%d%H%M%S`
      new_file_name=ZETA98_${out_file%.*}_$new_string.grib

      INPUT_DIR=$(get_apps_defaults d2d_input_dir)

      echo Copy grib file $HPE_EBMOSAIC_GRIB_DIR/$out_file >> $griblog
      echo to $HPE_EBMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
      cp $HPE_EBMOSAIC_GRIB_DIR/$out_file $HPE_EBMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog 2>&1

      if [[ -s $HPE_EBMOSAIC_GRIB_DIR/$out_file ]]
      then
         echo Move and rename grib file $HPE_EBMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
         echo to $INPUT_DIR/$new_file_name >> $griblog
         mv $HPE_EBMOSAIC_GRIB_DIR/gribfile.tmp $INPUT_DIR/$new_file_name >> $griblog 2>&1
      else
         echo Not moving grib file, zero bytes, removing temp file. >> $griblog
         rm $HPE_EBMOSAIC_GRIB_DIR/gribfile.tmp
      fi

   else
      echo EB Mosaic Grib files were not created for display on D2D >> $griblog

   fi
elif [[ $process = DSP* ]]
then
   D2D_DISPLAY=$(get_apps_defaults ermosaic_d2d_display_grib)

   echo Token ermosaic_d2d_display_grib is defined as $D2D_DISPLAY >> $griblog

   if [[ $D2D_DISPLAY = "ON" || $D2D_DISPLAY = "on" ]]; then

      new_string=`date -u +%d%H%M%S`
      new_file_name=ZETA98_${out_file%.*}_$new_string.grib

      INPUT_DIR=$(get_apps_defaults d2d_input_dir)

      echo Copy grib file $HPE_ERMOSAIC_GRIB_DIR/$out_file >> $griblog
      echo to $HPE_ERMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
      cp $HPE_ERMOSAIC_GRIB_DIR/$out_file $HPE_ERMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog 2>&1

      if [[ -s $HPE_ERMOSAIC_GRIB_DIR/$out_file ]]
      then
         echo Move and rename grib file $HPE_ERMOSAIC_GRIB_DIR/gribfile.tmp >> $griblog
         echo to $INPUT_DIR/$new_file_name >> $griblog
         mv $HPE_ERMOSAIC_GRIB_DIR/gribfile.tmp $INPUT_DIR/$new_file_name >> $griblog 2>&1
      else
         echo Not moving grib file, zero bytes, removing temp file. >> $griblog
         rm $HPE_ERMOSAIC_GRIB_DIR/gribfile.tmp
      fi

   else
      echo ER Mosaic Grib files were not converted to netCDF files to display on D2D >> $griblog

   fi
else
   echo HPE Grib files were not moved to display on D2D >> $griblog

fi
Dte=`date -u`
echo Completed process_hpe_grib_files at $Dte >> $griblog
exit 0
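The third argument's prefix (DHR, BDHR, BDSP, DSP) selects the mosaic directories above; a hypothetical call for the raw radar mosaic branch (the file and process names are illustrative only):

    process_hpe_grib_files ERMOSAIC0101120000z ERMOSAIC0101120000z.grib DSPKTLX0101120000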
@ -1,103 +0,0 @@
#!/bin/ksh
#
###############################################################################
# FILENAME: process_rfc_bias
# DESCRIPTION: This script is invoked via the message handling service (MHS)
#              mechanisms. It is called when an RFC bias message file
#              from an RFC is received at the local WFO. This script copies
#              this file to a local directory where it is then processed
#              by the process_rfc_bias program.
#
# The MHS places the incoming file in the directory:
#    /data/x400/mhs/msg/hydro
# This script copies this file to the directory:
#    /awips/hydroapps/precip_proc/local/data/rfc_bias_input
#
#
# The arguments to this script are provided via the
# message handling system (MHS) and are translated
# from the MHS args %ENCLOSE(1) %SUBJECT %MSGTYPE
#
#
# USAGE: process_rfc_bias filename subject_string msgtype
#
# AUTHOR: Bryon Lawrence
# CREATION DATE: April 5, 2007
# ORGANIZATION: OHD-11/HSEB
# MACHINE/SHELL: Korn shell
# MODIFICATION HISTORY:
#   DATE      PROGRAMMER   DESCRIPTION/REASON
#   4/5/2007  B. Lawrence  Created.
################################################################################
#

# This allows this script to be run from outside of the bin directory
RUN_FROM_DIR=`dirname $0`

# Set up the WHFS runtime environment
export FXA_HOME=/awips/fxa
. $FXA_HOME/readenv.sh
. $RUN_FROM_DIR/../../set_hydro_env
export RFC_BIAS_INPUT_DIR=$(get_apps_defaults rfc_bias_input_dir)
export PROCESS_BIAS_LOG_DIR=$(get_apps_defaults process_bias_log_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

export CLASSPATH=$DB_DRIVER_PATH:$PPROC_BIN/bias_trans.jar

# Prepare the log file.
process_bias_log=`date +$PROCESS_BIAS_LOG_DIR/process_rfc_bias_%m%d`

echo "--------------------------------------------------" >> $process_bias_log
echo "Starting process_rfc_bias as $LOGNAME at $(date -u +"%T %Z") on \
$(date -u +"%a %b %d %Y") on $HOSTNAME" >> $process_bias_log

FILENAME=$1
SUBJECT=$2
MSGTYPE=$3

echo "The filename supplied by the MHS: $FILENAME" >> $process_bias_log
echo "The subject line supplied by the MHS: $SUBJECT" >> $process_bias_log
echo "The type of message supplied by the MHS: $MSGTYPE" >> $process_bias_log

#
# Use a shell function to extract the argument from the subject line.
# The subject string is a quoted string with a filename, followed
# by a space, followed by the product identifier.
parseSubject()
{
   DESCRIP=$1
   PRODUCTID=$2
   return
}

#
# Create the destination filename and path.
parseSubject $SUBJECT
DATESTR=`date -u +%m%d`
TIMESTR=`date -u +%H%M%S`
DESTINATION_FILENAME=$RFC_BIAS_INPUT_DIR/$PRODUCTID.$DATESTR.$TIMESTR

#
# Copy the MHS rfc bias file to the rfc_bias_input directory
echo "Copying $FILENAME to $DESTINATION_FILENAME" >> $process_bias_log
cp -f $FILENAME $DESTINATION_FILENAME >> $process_bias_log 2>&1

if [[ $? -ne 0 ]]
then
   echo "The copy of $FILENAME to $DESTINATION_FILENAME failed." >> \
        $process_bias_log
   exit 1
fi

#
# Start the bias message reading program
echo "Starting the bias message reading program" >> $process_bias_log
echo "${SYS_JAVA_DIR}/bin/java ohd.hseb.bias_trans/BiasMessageReader $JDBCURL $DESTINATION_FILENAME" >> $process_bias_log

${SYS_JAVA_DIR}/bin/java ohd.hseb.bias_trans/BiasMessageReader $JDBCURL \
$DESTINATION_FILENAME >> $process_bias_log 2>&1
#
# Remove the bias message file
rm -f $DESTINATION_FILENAME >> $process_bias_log 2>&1

exit 0
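parseSubject relies on word splitting of the deliberately unquoted $SUBJECT; with hypothetical MHS values the assignments fall out as:

    SUBJECT="rfc_bias_file KRHABIASMSG"   # quoted string: filename, space, product id
    parseSubject $SUBJECT                 # -> DESCRIP=rfc_bias_file, PRODUCTID=KRHABIASMSG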
@ -1,494 +0,0 @@
#! /bin/ksh
# purge_hpe_files
# This script purges HPE (High-resolution Precipitation Estimator) files
# according to retention period requirements.

# David T. Miller RSIS OHD/HSEB
# October 30 2007 (adapted from purge_mpe_files)

# This allows you to run this script from outside of ./whfs/bin
RUN_FROM_DIR=`dirname $0`

# set up some environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env

export DSP_LOG_DIR=$(get_apps_defaults dsp_log_dir)
export DHR_LOG_DIR=$(get_apps_defaults dhr_log_dir)
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)
export PPROC_LOCAL_DATA=$(get_apps_defaults pproc_local_data)

HPELOGS=$HPE_LOG_DIR

hpe_purge_error=`date -u +error_hpepurge.%Y%m%d%H%M%S`

if [[ -d $HPELOGS ]]
then
   # set errorlog

   errorlog=$HPELOGS/$hpe_purge_error

   # GAD_DIR is directory which holds the gad program (get_apps_defaults.LX)

   #
   # get purge file settings

   LOGAGE=`get_apps_defaults hpe_purge_logage`
   FILEAGE=`get_apps_defaults hpe_purge_fileage`
   XMRGAGE=`get_apps_defaults hpe_purge_xmrgage`

   # set defaults should the tokens above be missing or set incorrectly

   if [[ $LOGAGE -le 0 ]]
   then
      LOGAGE=720
   fi

   if [[ $FILEAGE -le 0 ]]
   then
      FILEAGE=180
   fi

   if [[ $XMRGAGE -le 0 ]]
   then
      XMRGAGE=75
   fi

   # remove empty error logs

   if [[ ! -s $errorlog ]]
   then
      rm -f $errorlog
   fi

   Dte=`date -u`

   # Setup file names
   #
   fnm=$HPELOGS/purge_hpe_files.log
   tmp=$HPELOGS/purge_hpe_files.tmp

   echo `basename $0` log location: $fnm

   # Save only the latest events in the log file
   #
   if [ -e "${fnm}" ]
   then
      tail -1200 $fnm > $tmp
      mv $tmp $fnm
      chmod 777 $fnm
   fi

   # Log a header line to the output file
   #
   echo "----------------------------------------------------" >> $fnm
   echo "Begin purge_hpe_files at: " $Dte >> $fnm

else
   echo "HPE log directory $HPELOGS incorrect " >> $errorlog
   echo "Check .Apps_defaults and set_hydro_env for correctness " >> $errorlog
   echo "purge_hpe_files exiting with error " >> $errorlog
   exit 1
fi

# Note that the line below is the way this should be established
# However, since files are being purged, best to hard code to ensure
# that the files to be deleted are the correct ones

#HPE_OUTPUT_DIR=$PPROC_LOCAL_DATA/hpe
HPE_OUTPUT_DIR=${apps_dir}/precip_proc/local/data/hpe

if [[ -d $HPE_OUTPUT_DIR ]]
then
   HPEHEIGHT=$HPE_OUTPUT_DIR/height
   HPEINDEX=$HPE_OUTPUT_DIR/index
   HPEDHRMOSAIC=$HPE_OUTPUT_DIR/dhrmosaic
   HPEERMOSAIC=$HPE_OUTPUT_DIR/ermosaic
   HPEEBMOSAIC=$HPE_OUTPUT_DIR/ebmosaic
   HPEBDHRMOSAIC=$HPE_OUTPUT_DIR/bdhrmosaic
   HPEAVGMOSAIC=$HPE_OUTPUT_DIR/avgrmosaic
   HPEMAXMOSAIC=$HPE_OUTPUT_DIR/maxrmosaic
   HPENOWCAST=$HPE_OUTPUT_DIR/nowcast

   HPEDHRGRIB=$HPEDHRMOSAIC/grib
   HPEDHRNETCDF=$HPEDHRMOSAIC/netcdf
   HPEDHRGIF=$HPEDHRMOSAIC/gif

   HPEBDHRGRIB=$HPEBDHRMOSAIC/grib
   HPEBDHRNETCDF=$HPEBDHRMOSAIC/netcdf
   HPEBDHRGIF=$HPEBDHRMOSAIC/gif

   HPEERGRIB=$HPEERMOSAIC/grib
   HPEERNETCDF=$HPEERMOSAIC/netcdf
   HPEERGIF=$HPEERMOSAIC/gif

   HPEEBGRIB=$HPEEBMOSAIC/grib
   HPEEBNETCDF=$HPEEBMOSAIC/netcdf
   HPEEBGIF=$HPEEBMOSAIC/gif
else
   echo "HPE directory $HPE_OUTPUT_DIR not valid. " >> $fnm
   echo "Check for correct directory name " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
   echo "Exiting purge_hpe_files with error at: " $Dte >> $fnm
   exit 1
fi

DHRXMRG=${apps_dir}/precip_proc/local/data/dhr_decoded
DHRARCHIVE=${apps_dir}/precip_proc/local/data/dhr_archive
DHRERROR=${apps_dir}/precip_proc/local/data/dhr_error

DSPXMRG=${apps_dir}/precip_proc/local/data/dsp_decoded
DSPARCHIVE=${apps_dir}/precip_proc/local/data/dsp_archive
DSPERROR=${apps_dir}/precip_proc/local/data/dsp_error

# Note kept these lines in case there's enough confidence to
# use the $PPROC_LOCAL_DATA value
# But with purging, must ensure what's purged is what was
# meant to be.

#DHRXMRG=$PPROC_LOCAL_DATA/dhr_decoded
#DHRARCHIVE=$PPROC_LOCAL_DATA/dhr_archive
#DHRERROR=$PPROC_LOCAL_DATA/dhr_error

#DSPXMRG=$PPROC_LOCAL_DATA/dsp_decoded
#DSPARCHIVE=$PPROC_LOCAL_DATA/dsp_archive
#DSPERROR=$PPROC_LOCAL_DATA/dsp_error

# Purge hpe log files generated by hpe_fieldgen
#

echo " " >> $fnm
echo " hpe_fieldgen log files " >> $fnm

if [[ -d $HPELOGS && -n "$HPELOGS" ]]
then
   find $HPELOGS -type f -name 'HPE*' -mmin +$LOGAGE -print -exec rm {} \; >> $fnm
   find $HPELOGS -type f -name 'error.*' -mmin +$LOGAGE -print -exec rm {} \; >> $fnm
   find $HPELOGS -type f -name 'print.*' -mmin +$LOGAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE LOG DIR not valid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
   exit 1
fi

if [[ -d $DHR_LOG_DIR && -n "$DHR_LOG_DIR" ]]
then
   find $DHR_LOG_DIR -type f -name 'decodedhr*' -mmin +$LOGAGE -print -exec rm {} \; >> $fnm
else
   echo "DHR LOG DIR not valid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $DSP_LOG_DIR && -n "$DSP_LOG_DIR" ]]
then
   find $DSP_LOG_DIR -type f -name 'decodedsp*' -mmin +$LOGAGE -print -exec rm {} \; >> $fnm
else
   echo "DSP LOG DIR not valid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

#Purge hpe gridded field files
#

echo " " >> $fnm
echo " hpe gridded field files " >> $fnm

if [[ -d $HPEHEIGHT && -n "$HPEHEIGHT" ]]
then
   find $HPEHEIGHT -type f -name '*HEIGHT*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE height directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEINDEX && -n "$HPEINDEX" ]]
then
   find $HPEINDEX -type f -name '*INDEX*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE index directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEDHRMOSAIC && -n "$HPEDHRMOSAIC" ]]
then
   find $HPEDHRMOSAIC -type f -name 'DHRMOSAIC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE DHR Mosaic directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEBDHRMOSAIC && -n "$HPEBDHRMOSAIC" ]]
then
   find $HPEBDHRMOSAIC -type f -name 'BDHRMOSAIC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE Bias DHR Mosaic directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEERMOSAIC && -n "$HPEERMOSAIC" ]]
then
   find $HPEERMOSAIC -type f -name 'ERMOSAIC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE ER Mosaic directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEAVGMOSAIC && -n "$HPEAVGMOSAIC" ]]
then
   find $HPEAVGMOSAIC -type f -name 'AVGRMOSAIC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE AVG Mosaic directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEMAXMOSAIC && -n "$HPEMAXMOSAIC" ]]
then
   find $HPEMAXMOSAIC -type f -name 'MAXRMOSAIC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE MAX Mosaic directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEEBMOSAIC && -n "$HPEEBMOSAIC" ]]
then
   find $HPEEBMOSAIC -type f -name 'EBMOSAIC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE EB Mosaic directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPENOWCAST && -n "$HPENOWCAST" ]]
then
   find $HPENOWCAST -type f -name 'BPRT*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPENOWCAST -type f -name 'BPRT*z.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

   find $HPENOWCAST -type f -name 'PRT*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPENOWCAST -type f -name 'PRT*z.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

   find $HPENOWCAST -type f -name 'ACC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPENOWCAST -type f -name 'ACC*z.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

   find $HPENOWCAST -type f -name 'BACC*z' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPENOWCAST -type f -name 'BACC*z.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

else
   echo "HPE Nowcast directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEDHRGRIB && -n "$HPEDHRGRIB" ]]
then
   find $HPEDHRGRIB -type f -name 'DHRMOSAIC*.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE DHR Mosaic GRIB directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEDHRNETCDF && -n "$HPEDHRNETCDF" ]]
then
   find $HPEDHRNETCDF -type f -name 'DHRMOSAIC*.nc' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE DHR Mosaic NetCDF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEDHRGIF && -n "$HPEDHRGIF" ]]
then
   find $HPEDHRGIF -type f -name 'DHRMOSAIC*.gif' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE DHR Mosaic GIF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEBDHRGRIB && -n "$HPEBDHRGRIB" ]]
then
   find $HPEBDHRGRIB -type f -name 'BDHRMOSAIC*.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE Bias DHR Mosaic GRIB directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEBDHRNETCDF && -n "$HPEBDHRNETCDF" ]]
then
   find $HPEBDHRNETCDF -type f -name 'BDHRMOSAIC*.nc' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE Bias DHR Mosaic NetCDF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEBDHRGIF && -n "$HPEBDHRGIF" ]]
then
   find $HPEBDHRGIF -type f -name 'BDHRMOSAIC*.gif' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE Bias DHR Mosaic GIF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEERGRIB && -n "$HPEERGRIB" ]]
then
   find $HPEERGRIB -type f -name 'ERMOSAIC*.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPEERGRIB -type f -name 'AVGRMOSAIC*.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPEERGRIB -type f -name 'MAXRMOSAIC*.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

else
   echo "HPE ER Mosaic GRIB directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEERNETCDF && -n "$HPEERNETCDF" ]]
then
   find $HPEERNETCDF -type f -name 'ERMOSAIC*.nc' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPEERNETCDF -type f -name 'AVGRMOSAIC*.nc' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPEERNETCDF -type f -name 'MAXRMOSAIC*.nc' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

else
   echo "HPE ER Mosaic NetCDF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEERGIF && -n "$HPEERGIF" ]]
then
   find $HPEERGIF -type f -name 'ERMOSAIC*.gif' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPEERGIF -type f -name 'AVGRMOSAIC*.gif' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
   find $HPEERGIF -type f -name 'MAXRMOSAIC*.gif' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm

else
   echo "HPE ER Mosaic GIF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEEBGRIB && -n "$HPEEBGRIB" ]]
then
   find $HPEEBGRIB -type f -name 'EBMOSAIC*.grib' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE EB Mosaic GRIB directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEEBNETCDF && -n "$HPEEBNETCDF" ]]
then
   find $HPEEBNETCDF -type f -name 'EBMOSAIC*.nc' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE EB Mosaic NetCDF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $HPEEBGIF && -n "$HPEEBGIF" ]]
then
   find $HPEEBGIF -type f -name 'EBMOSAIC*.gif' -mmin +$FILEAGE -print -exec rm {} \; >> $fnm
else
   echo "HPE EB Mosaic GIF directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $DHRXMRG && -n "$DHRXMRG" ]]
then
   find $DHRXMRG -type f -name 'DHR*Z' -mmin +$XMRGAGE -print -exec rm {} \; >> $fnm
else
   echo "DHR XMRG directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

if [[ -d $DSPXMRG && -n "$DSPXMRG" ]]
then
   find $DSPXMRG -type f -name 'DSP*Z' -mmin +$XMRGAGE -print -exec rm {} \; >> $fnm
else
   echo "DSP XMRG directory invalid. " >> $fnm
   echo "Check .Apps_defaults settings or set_hydro_env script" >> $fnm
#   exit 1
fi

Dte=`date -u`
echo "End purge_hpe_files at: " $Dte >> $fnm

exit 0

#End of purge_hpe_files
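All three hpe_purge_* tokens are ages in minutes, since every find above uses -mmin; a sketch of explicit settings that mirror the script's fallback defaults (token : value layout assumed):

    hpe_purge_logage  : 720   # HPE/DHR/DSP logs kept 12 hours
    hpe_purge_fileage : 180   # mosaic, grib, netcdf, and gif output kept 3 hours
    hpe_purge_xmrgage : 75    # decoded DHR/DSP xmrg files kept 75 minutes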
@ -1,295 +0,0 @@
#! /bin/ksh
# purge_mpe_files
# This script purges mpe (Multi-sensor Precipitation Estimator) files
# History: Deng, Jingtao August 09, 2001
#          modified by P. Tilles 4/22/2002
#             - removed stage3 directories
#             - added jpeg, netCDF, grib directories
#          modified by P. Tilles 10/2002
#             - added DISAGG_LOG_DIR purge
#          modified by P. Tilles 9/2003
#             - added LIGHTNING_LOG_DIR purge
#          March 9 2004 - added purge of mlmosaic files - pst
#          March 25 2004 - added purge of lsatpre files - pst
#             - removed delete from satel_precip dir
#          March 30 2004 - added purge of state var files
#             for local bias corr satellite - pst
#          May 5, 2006 - Modified to fix mistakes made in logic
#             to purge MAXRMOSAIC, AVGRMOSAIC, and
#             P3LMOSAIC fields. Added logic to purge
#             GAGETRIANGLES.
#          May 5, 2006 - Modified to purge DailyQC files.
#          Nov 17, 2006 - Modified to purge sbn grib and sbn qpe directories.
#          May 2007 - Added SRG related fields (3 new fields)
#             - added "-type f" to all find commands
#             - added directories for RFC bias transfer RFC bias fields
#          Sep 2007 - removed stray "i" from delete on FREEZEGRID directory
#          Nov 2007 - added purge of disagg logs from mpe_editor dir
#             - changed purge of mpe_fieldgen logs to purge filenames mpe_*
#          Dec 2007 - removed purge of old disagg logs from old disagg app
#          Jan 2008 - added purge of DailyQC freezing level preprocessor logs.

# This allows you to run this script from outside of ./whfs/bin
RUN_FROM_DIR=`dirname $0`

# set up some environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
. $RUN_FROM_DIR/../../check_app_context

export LIGHTNING_LOG_DIR=$(get_apps_defaults lightning_log_dir)
export GAQ_LOG_DIR=$(get_apps_defaults gaq_log_dir)
export MPE_QPE_SBN_DIR=$(get_apps_defaults mpe_qpe_sbn_dir)
export MPE_QPE_GRIB_SBN_DIR=$(get_apps_defaults mpe_qpe_grib_sbn_dir)
export MPE_GAGEQC_DIR=$(get_apps_defaults mpe_gageqc_dir)
export MPE_LOG_DIR=$(get_apps_defaults rfcwide_logs_dir)
export RFCWIDE_OUTPUT_DIR=$(get_apps_defaults rfcwide_output_dir)
export PPROC_LOG=$(get_apps_defaults pproc_log)

#
# Define directories for MPE data.
MPELOGS=$MPE_LOG_DIR
MPEEDITLOGS=$PPROC_LOG/mpe_editor
RFCBIASLOGS=$PPROC_LOG/process_bias_message
GAQLOGS=$GAQ_LOG_DIR

GAGETRIANGLES=$RFCWIDE_OUTPUT_DIR/gagetriangles
RWHEIGHT=$RFCWIDE_OUTPUT_DIR/height
RWINDEX=$RFCWIDE_OUTPUT_DIR/index
MAXRMOSAIC=$RFCWIDE_OUTPUT_DIR/maxrmosaic
AVGRMOSAIC=$RFCWIDE_OUTPUT_DIR/avgrmosaic
P3LMOSAIC=$RFCWIDE_OUTPUT_DIR/p3lmosaic
RWBMOSAIC=$RFCWIDE_OUTPUT_DIR/bmosaic
RWRMOSAIC=$RFCWIDE_OUTPUT_DIR/rmosaic
RWMMOSAIC=$RFCWIDE_OUTPUT_DIR/mmosaic
RWMLMOSAIC=$RFCWIDE_OUTPUT_DIR/mlmosaic
RWLSATPRE=$RFCWIDE_OUTPUT_DIR/lsatpre
RWLMOSAIC=$RFCWIDE_OUTPUT_DIR/lmosaic
RWLOCSPAN=$RFCWIDE_OUTPUT_DIR/locspan
RWLOCBIAS=$RFCWIDE_OUTPUT_DIR/locbias
RWSTATEVAR=$RFCWIDE_OUTPUT_DIR/state_var
RWSATSTVAR=$RFCWIDE_OUTPUT_DIR/sat_state_var
RWGAG=$RFCWIDE_OUTPUT_DIR/gageonly
RWXMRG=$RFCWIDE_OUTPUT_DIR/qpe
RWDRAWPRECIP=$RFCWIDE_OUTPUT_DIR/draw_precip

MPESRMOSAIC=$RFCWIDE_OUTPUT_DIR/srmosaic
MPESGMOSAIC=$RFCWIDE_OUTPUT_DIR/sgmosaic
MPESRGMOSAIC=$RFCWIDE_OUTPUT_DIR/srgmosaic

MPERFCBMOSAIC=$RFCWIDE_OUTPUT_DIR/rfcbmosaic
MPERFCMMOSAIC=$RFCWIDE_OUTPUT_DIR/rfcmmosaic

MPEGIF=$RFCWIDE_OUTPUT_DIR/qpe_gif
MPEGRIB=$RFCWIDE_OUTPUT_DIR/qpe_grib
MPEGRIBSBN=$MPE_QPE_GRIB_SBN_DIR
MPEQPESBN=$MPE_QPE_SBN_DIR
MPEJPEG=$RFCWIDE_OUTPUT_DIR/qpe_jpeg
MPENETCDF=$RFCWIDE_OUTPUT_DIR/qpe_netcdf

LIGHTNING_LOG=$LIGHTNING_LOG_DIR

RFCQPETEMP=$RFCWIDE_OUTPUT_DIR/rfcqpe_temp
RFCQPE01=$RFCWIDE_OUTPUT_DIR/rfcqpe01
RFCQPE06=$RFCWIDE_OUTPUT_DIR/rfcqpe06
RFCQPE24=$RFCWIDE_OUTPUT_DIR/rfcqpe24

RFCBIASINDIR=$RFCWIDE_OUTPUT_DIR/bias_message_input
RFCBIASOUTDIR=$RFCWIDE_OUTPUT_DIR/bias_message_output

#
# Define directories for DailyQC data
SCRATCH=$MPE_GAGEQC_DIR/scratch

PRECIPPOINT=$MPE_GAGEQC_DIR/precip/point
PRECIPMAP=$MPE_GAGEQC_DIR/precip/MAP
PRECIPGRID=$MPE_GAGEQC_DIR/precip/grid
PRECIPBAD=$MPE_GAGEQC_DIR/precip/bad
PRECIPDEV=$MPE_GAGEQC_DIR/precip/dev

TEMPPOINT=$MPE_GAGEQC_DIR/temperature/point
TEMPMAT=$MPE_GAGEQC_DIR/temperature/MAT
TEMPGRID=$MPE_GAGEQC_DIR/temperature/grid
TEMPBAD=$MPE_GAGEQC_DIR/temperature/bad
TEMPDEV=$MPE_GAGEQC_DIR/temperature/dev

FREEZEPOINT=$MPE_GAGEQC_DIR/freezing_level/point
FREEZEMAZ=$MPE_GAGEQC_DIR/freezing_level/MAZ
FREEZEGRID=$MPE_GAGEQC_DIR/freezing_level/grid

Dte=`date -u`

# Setup file names
#
fnm=$PPROC_LOG/misc/purge_mpe_files.log
tmp=$PPROC_LOG/misc/purge_mpe_files.tmp

echo `basename $0` log location: $fnm

# Save only the latest events in the log file
#
if [ -e "${fnm}" ]
then
   tail -1200 $fnm > $tmp
   mv $tmp $fnm
   chmod 777 $fnm
fi

# Log a header line to the output file
#
echo "----------------------------------------------------" >> $fnm
echo "Begin purge_mpe_files at: " $Dte >> $fnm

# Purge mpe log files generated by mpe_fieldgen
#

echo " " >> $fnm
echo " mpe_fieldgen log files " >> $fnm
find $MPELOGS -name 'mpe_*' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $MPELOGS -name 'error.*' -type f -mtime +10 -print -exec rm {} \; >> $fnm

find $MPEEDITLOGS -name 'disagg*' -type f -mtime +2 -print -exec rm {} \; >> $fnm
find $MPEEDITLOGS -name 'mpe_editor*' -type f -mtime +2 -print -exec rm {} \; >> $fnm
find $MPEEDITLOGS -name 'mpe_pre*' -type f -mtime +2 -print -exec rm {} \; >> $fnm
find $MPEEDITLOGS -name 'freez*' -type f -mtime +2 -print -exec rm {} \; >> $fnm

find $GAQLOGS -name 'process_qpe*' -type f -mtime +3 -print -exec rm {} \; >> $fnm
find $GAQLOGS -name 'transmit_rfc*' -type f -mtime +3 -print -exec rm {} \; >> $fnm

#Purge mpe gridded field files older than 2 days
#For gif/jpeg/grib/netcdf and state variables, purge files older than 1 day
#
echo " " >> $fnm
echo " mpe gridded field files " >> $fnm
find $RWMMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWLSATPRE -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWMLMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWRMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWBMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWLMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWLOCSPAN -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $P3LMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $GAGETRIANGLES -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $AVGRMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $MAXRMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWLOCBIAS -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWSTATEVAR -name '*z' -type f -mtime +0 -print -exec rm {} \; >> $fnm
find $RWSATSTVAR -name '*z' -type f -mtime +0 -print -exec rm {} \; >> $fnm
find $RWHEIGHT -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWINDEX -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWGAG -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWXMRG -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RWDRAWPRECIP -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $MPEQPESBN -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm

find $MPESRMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $MPESGMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $MPESRGMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm

find $MPEGIF -name '*.gif' -type f -mtime +0 -print -exec rm {} \; >> $fnm
find $MPEGRIB -name '*.grib' -type f -mtime +0 -print -exec rm {} \; >> $fnm
find $MPEGRIBSBN -name '*.grib' -type f -mtime +0 -print -exec rm {} \; >> $fnm
find $MPEJPEG -name '*.jpg' -type f -mtime +0 -print -exec rm {} \; >> $fnm
find $MPENETCDF -name '*.nc' -type f -mtime +0 -print -exec rm {} \; >> $fnm

find $LIGHTNING_LOG -name 'lightning_proc*' -type f -mtime +1 -print -exec rm {} \; >> $fnm

#
# Purge the GageQC directories
find $PRECIPPOINT -name 'precip*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $PRECIPGRID -name 'precip*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $PRECIPMAP -name 'map*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $PRECIPBAD -name 'precip*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $PRECIPDEV -name 'precip*' -type f -mtime +20 -print -exec rm {} \; >> $fnm

find $TEMPPOINT -name 'temp*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $TEMPGRID -name 'temp*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $TEMPMAT -name 'mat*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $TEMPBAD -name 'temp*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $TEMPDEV -name 'temp*' -type f -mtime +20 -print -exec rm {} \; >> $fnm

find $FREEZEPOINT -name 'freez*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $FREEZEGRID -name 'freez*' -type f -mtime +20 -print -exec rm {} \; >> $fnm
find $FREEZEMAZ -name 'maz*' -type f -mtime +20 -print -exec rm {} \; >> $fnm

find $SCRATCH -name 'pcp.*' -type f -mtime +0 -print -exec rm {} \; >> $fnm

find $RFCQPETEMP -name '*[A-Z]*' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RFCQPE01 -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RFCQPE06 -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RFCQPE24 -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm

find $RFCBIASLOGS -name '*[0-9]' -type f -mtime +3 -print -exec rm {} \; >> $fnm
find $MPERFCBMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $MPERFCMMOSAIC -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RFCBIASINDIR -name '*[0-9]' -type f -mtime +1 -print -exec rm {} \; >> $fnm
find $RFCBIASOUTDIR -name '*z' -type f -mtime +1 -print -exec rm {} \; >> $fnm

Dte=`date -u`
echo "End purge_mpe_files at: " $Dte >> $fnm

#End of purge_mpe_files
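Both purge scripts are written to be run periodically; a hypothetical crontab for the operational account (install paths illustrative only):

    0 * * * *  /awips/hydroapps/precip_proc/bin/purge_mpe_files
    15 * * * * /awips/hydroapps/precip_proc/bin/purge_hpe_files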
@ -1,30 +0,0 @@
#!/bin/ksh
# rerun_mpe_fieldgen
# updated July 11, 2007 to remove
# call to build_hourly.
#
# Updated March 18, 2008 to remove the call to MPE Fieldgen and error
# log information which is not used. Also removed commented out
# call to set_hydro_env to clean up this script some more.
# Actions which need to be performed in addition to rerunning
# MPE Fieldgen can be placed in this script. This script is
# run after MPE Fieldgen is rerun.

# This allows you to call this script from outside of ./standard/bin
RUN_FROM_DIR=`dirname $0`

#
# Transmit the RFC bias message if the user has
# elected to do so on reruns.
#
if [[ "$MPE_TRANSMIT_BIAS" = "ON" && "$TRANSMIT_BIAS_ON_RERUN" = "YES" ]]
then
year=`echo $2 | cut -c5-8`
monthday=`echo $2 | cut -c1-4`
$PPROC_BIN/transmit_rfc_bias $year$monthday$1
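#
# Illustrative note (calling convention inferred from the cut commands
# above, not documented elsewhere): $2 appears to be MMDDYYYY and $1 the
# hour, so $2=03182008 with $1=12 yields transmit_rfc_bias 2008031812.
#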
fi

if [ "$SSHP_INVOKE_MAP_PREPROCESS" = "ON" ]
then
$WHFS_BIN_DIR/run_SSHP_MAP_preprocess >$SSHP_LOG_DIR/MAPPreprocessor.stdout.log 2>>$SSHP_LOG_DIR/MAPPreprocessor.stdout.log &
fi

@@ -1,146 +0,0 @@
#!/usr/bin/perl -w
use File::Copy;


# setup the grid
use AGRID;
my($file,$AGID,@temp,@hght);

#set file name
$file=$ARGV[0];
#print "file is $file\n";

$siteid=$ARGV[1];
$envsiteid=$ENV{"MPE_SITE_ID"};
#print "siteid=$siteid envsiteid=$envsiteid\n";
if ($siteid ne $envsiteid) {
$sitefile=$ENV{"MPE_STATION_LIST_DIR"} . "/" . $siteid . "_station_list";
#print "sitefile=$sitefile\n";
open(STNS,$sitefile) ||
die "Could not open ${siteid} station list file \n";
@areastns=<STNS>;
close(STNS);
chomp(@areastns);
$count = grep(/HZIRZZZ/,@areastns);
#print "count=$count\n";
if ($count == 0) { exit; } # if there are no freezing stations in the file, exit

#reinitialize so the values will be determined from the area station list file

@lat=();
@lon=();

#return the list of freezing stations

@frzstns = grep(/HZIRZZZ/,@areastns);

foreach $fzstn(@frzstns) {
@line=split(' ',$fzstn);
$line[3]=(-1.0)*$line[3];
#print "lat=$line[2] lon=$line[3]\n";
push(@lat, $line[2]);
push(@lon, $line[3]);
}

$count=$count-1;
}
else {

$count=#####;

}

$AGID=&AGRID::setup($file,0);
if ($AGID<0) { die &AGRID::geterrorstring; }
if (&AGRID::setupcoord($AGID)) { die &AGRID::geterrorstring; }


for ($i=0; $i<=$count; $i++) {
$lat = $lat[$i];
$lon = $lon[$i];

$fzlvl[$i]=-9; #initially set to missing

#get the temp and hght data at all levels
if (&AGRID::getvertpointll($AGID,"t",0,$lat,$lon,1,\@temp)) { &AGRID::geterrorstring; next; }
if (&AGRID::getvertpointll($AGID,"gh",0,$lat,$lon,1,\@hght)) { &AGRID::geterrorstring; next; }
if (&AGRID::getlevs($AGID,"gh",\@ghlevels)) { &AGRID::geterrorstring; next; }
if (&AGRID::getlevs($AGID,"t",\@tlevels)) { &AGRID::geterrorstring; next; }

#get mb levels of all variables
@mblevs=();
&getmblevs(\@tlevels,\@mblevs);
&getmblevs(\@ghlevels,\@mblevs);

#loop over sorted levels

$fzlvl[$i]=0;

foreach $mb (sort(Numerically @mblevs)) {

if (($j=&findmb($mb,\@tlevels))>=0) {
if (($k=&findmb($mb,\@ghlevels))>=0) {
#print "Hght & Temp at ",$mb,"\n";
#print "j= ",$j,"\n";
#print "k= ",$k,"\n";
#print "hght= ",$hght[$k],"\n";
#print "temp= ",$temp[$j],"\n";
if ($hght[$k]>-9000) {
if ($temp[$j]>273.16) {
#print "jtop= ",$jtop,"\n";
#print "ktop= ",$ktop,"\n";
#print "hght at top= ",$hght[$ktop],"\n";
#print "temp at top= ",$temp[$jtop],"\n";
$fzlvl[$i] = ($hght[$ktop] -
(($hght[$ktop] - $hght[$k]) *
((273.16 - $temp[$jtop])/($temp[$j] -
$temp[$jtop])))) *.00328;
$fzlvl[$i] = substr($fzlvl[$i], 0, 4);
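# Worked example of the interpolation above (illustrative numbers only):
# with the warm level k at 1500 m / 274.16 K and the level above (ktop)
# at 1800 m / 272.16 K, the 273.16 K crossing interpolates to 1650 m;
# the *.00328 factor converts meters to thousands of feet, giving ~5.41.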
#print "fzlvl= ",$fzlvl[$i],"\n";
last;
} else {
$jtop=$j;
$ktop=$k;
}
}
}
}
}
}

$j=0;
while ($j <= $count) {
print "$fzlvl[$j]\n";
$j++;
}

#find level index that has the specified MB value
sub findmb {
my($mb,$Rlev)=@_;
my($search,$j,$lev);

$search=sprintf("MB %d",$mb);
$j=0;
foreach $lev (@$Rlev) {
if ($lev eq $search) {
return($j);
}
$j++;
}
return(-1);
}

#find any levels that contain the text MB at the beginning
sub getmblevs {
my($Rlev,$Rmblev)=@_;
my($lev,$mb);

foreach $lev (@$Rlev) {
if (!($lev=~/^MB (\d+)$/)) { next; }
$mb=$1;
if (grep(/$mb/,@$Rmblev)==0) { push(@$Rmblev,$mb); }
}
return;
}

sub Numerically { $a <=> $b }

@@ -1,413 +0,0 @@
#!/usr/bin/tclsh
#

#
# Open the log freezing level processing log file. This will
# be written into the MPE Editor log directory.
# This script is called from the run_freezing_level script which
# sources set_hydro_env and defines several environmental variables
# used in this script.
#
# October 17, 2007 - This script was modified to take an optional
# command line argument specifying the number of days to process
#
# March 11, 2008 - Updated to log the names of the files in
# the RUC80. Fixed variable reference typos.
#
# Dec 2009 - Changed date on .E line to be one day earlier
#
# Usage: ruc.tcl [number of days to process]
#
set run_from_dir [lindex $argv 0]
package require Tclx

#
# Check for a command line argument specifying the number of days to process.
# If no argument is present, then assume that only one day is being processed.
if { $argc == 3 } {

set days_to_process [lindex $argv 1 ]
set siteid [lindex $argv 2 ]

} else {

set days_to_process 1
set siteid $env(MPE_SITE_ID)

}

#
# Set the freezing level data input and output directories
set rucdir $env(RUC_MODEL_DATA_DIR)
set mmdir $env(MPE_POINT_FREEZING_DIR)
#set siteid $env(MPE_SITE_ID)

# get env variable DQC_PREPROCESSOR_BASETIME. If it is 18Z, load
# freezing level data at 12~18Z, 18~00Z, 00~06Z, 06~12Z. If it is
# 12Z, load freezing level data at 06~12Z, 12~18Z, 18~00Z and
# 00~06Z. If it is 00Z, load freezing level data at 18~00Z,
# 00~06Z, 06~12Z, 12~18Z. If it is 06Z, load the freezing level
# data from 00~06Z, 06~12Z, 12~18Z, 18~00Z.

set dqc_preprocessor_basetime $env(DQC_PREPROCESSOR_BASETIME)
puts "The dqc_preprocessor_basetime is $dqc_preprocessor_basetime"
puts "The number of days to process is $days_to_process"

# test purpose

set rucscript $run_from_dir/ruc.pl

proc mformat {x} {
if {$x == ""} {
return M
} elseif {$x == 0} {
return 0.00S
} else {
set x [format %.1f $x]
return [set x]0S
}
}
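# mformat behavior, for reference (derived directly from the proc above):
# mformat "" -> M, mformat 0 -> 0.00S, mformat 5.41 -> 5.40S
# (one decimal place plus a literal "0S" to form the SHEF value).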

# Force the dqc_preprocessor_basetime string to be lower case.
set dqc_preprocessor_basetime [string tolower $dqc_preprocessor_basetime]

#setup dates
set csecs [clock seconds]
set tsecs $csecs

set hour [clock format $csecs -format "%H" -gmt 1]
puts "current hour is $hour"

# set cdates and tdates. Nothing needs to be done when the base hour is 00z.
if { $dqc_preprocessor_basetime == "12z" && $hour < 12 } {

set csecs [expr $csecs-24*60*60]

} elseif { $dqc_preprocessor_basetime == "12z" && $hour >= 12 } {

set tsecs [ expr $csecs+24*60*60]

} elseif { $dqc_preprocessor_basetime == "18z" && $hour < 18 } {

set csecs [expr $csecs-24*60*60]

} elseif { $dqc_preprocessor_basetime == "18z" && $hour >= 18 } {

set tsecs [expr $csecs+24*60*60]

} elseif { $dqc_preprocessor_basetime == "06z" && $hour < 6 } {

set csecs [ expr $csecs-24*60*60 ]

} elseif { $dqc_preprocessor_basetime == "06z" && $hour >= 6 } {

set tsecs [expr $csecs+24*60*60]
}

#
# List the available RUC80 files.

puts "Contents of $rucdir:"

set ruc_files [glob "$rucdir/*"]

foreach f $ruc_files {
if {![file isdirectory $f]} {
puts $f
}
}

# Loop over the number of days to process freezing level
# data for. Take the user specified number of days minus 1.

set days_to_process [ expr $days_to_process-1 ]

for { set k $days_to_process } {$k >= 0 } { incr k -1 } {
# Simultaneously subtract from csecs and tsecs the number of days
#

set begin_secs [expr $csecs-24*60*60*$k]
set end_secs [expr $tsecs-24*60*60*$k]

#set k1 [expr $k +1]
#puts "k1 is $k1"
#set end_secs1 [expr $tsecs-24*60*60*$k1]
#set otdate1 [clock format $end_secs1 -format "%Y%m%d" -gmt 1]
#puts "otdate1 is $otdate1"
set otdate1 [clock format $begin_secs -format "%Y%m%d" -gmt 1]

set cdate [clock format $begin_secs -format "%Y%m%d" -gmt 1]
set tdate [clock format $end_secs -format "%Y%m%d" -gmt 1]

puts "current days being processed are $cdate and $tdate"

#set file
set otdate [clock format $end_secs -format "%Y%m%d" -gmt 1]
set ofile $mmdir/freezing_1_${siteid}_point_$otdate
puts "output file is $ofile"

#get values for hours

if { $dqc_preprocessor_basetime == "18z" } {

set dates [list $cdate $tdate $tdate $tdate]
set hrs [list 1800 0000 0600 1200]

} elseif { $dqc_preprocessor_basetime == "00z" } {

set dates [list $tdate $tdate $tdate $tdate]
set hrs [list 0000 0600 1200 1800]

} elseif { $dqc_preprocessor_basetime == "06z" } {

set dates [list $cdate $cdate $cdate $tdate]
set hrs [list 0600 1200 1800 0000]

} else {

# dqc_preprocessor_basetime must be 12z
set dates [list $cdate $cdate $tdate $tdate]
set hrs [list 1200 1800 0000 0600]
}

set i 0
foreach hr $hrs dt $dates {
puts "hour $hr"
set file $rucdir/[set dt]_$hr
puts "rucfile is $file"
if {[file exists $file]} {
puts "$rucscript $file $siteid"
set vals$i [exec $rucscript $file $siteid]
} else {
puts "File $file not found. Continuing."
set vals$i ""
}
puts [set vals$i]
incr i
}

#
# If there was no data, do not overwrite the existing freezing level file.
set len0 [ string length $vals0 ]
set len1 [ string length $vals1 ]
set len2 [ string length $vals2 ]
set len3 [ string length $vals3 ]

if { $len0 == 0 && $len1 == 0 && $len2 == 0 && $len3 == 0 } {
# Probably want to continue here.
puts "No data found for $ofile."
continue;
}
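#
# The .E records written below encode four 6-hourly HZIRZ (freezing
# level) values per pseudo-station; e.g. (illustrative values only):
# .E Z00003 20080318 DH18/HZIRZ/DIH+6/ 5.40S/ 5.20S/ M/ 4.90S
#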

#
# Check if the output file already exists. If it does, then open it.
# Process each record. For each blank freezing level value computed
# above, see if there was already one in the file from a previous
# run of this script. If there is, use it. This needs to be
# done because RUC80 data files are only kept around for 8 hours or
# so.

if [catch {open $ofile r} fileId] {
#
# The file does not exist or could not be opened.
set i 0
set out ""
puts "file does not exist"

foreach v0 $vals0 v1 $vals1 v2 $vals2 v3 $vals3 {
set stn [format %05d $i]
set v0 [mformat $v0]
set v1 [mformat $v1]
set v2 [mformat $v2]
set v3 [mformat $v3]

if { $dqc_preprocessor_basetime == "18Z" ||
$dqc_preprocessor_basetime == "18z"} {
append out ".E Z$stn $otdate1 DH18/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"

} elseif { $dqc_preprocessor_basetime == "00Z" ||
$dqc_preprocessor_basetime == "00z" } {
append out ".E Z$stn $otdate1 DH00/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"

} elseif { $dqc_preprocessor_basetime == "06Z" ||
$dqc_preprocessor_basetime == "06z" } {
append out ".E Z$stn $otdate1 DH06/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"

} else {
append out ".E Z$stn $otdate1 DH12/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"
}

incr i
}

puts -nonewline $out
} else {
#
# The file does exist.
set i 0
set out ""

puts "file exists"
foreach v0 $vals0 v1 $vals1 v2 $vals2 v3 $vals3 {

set stn [format %05d $i]
set v0 [mformat $v0]
set v1 [mformat $v1]
set v2 [mformat $v2]
set v3 [mformat $v3]

# Read the record.
gets $fileId line

set len [ string length $line ]

if {$len > 0} {

if { $dqc_preprocessor_basetime == "18Z" ||
$dqc_preprocessor_basetime == "18z"} {

# Parse the line for the four freezing level values.
scan $line {%s %s %s %s %s %s %s %s} sheftype hb5 date rest v18 v00 v06 v12

set v18 [ string trimright $v18 / ]
set v00 [ string trimright $v00 / ]
set v06 [ string trimright $v06 / ]
set v12 [ string trimright $v12 / ]
set v18 [ string trimleft $v18 ]
set v00 [ string trimleft $v00 ]
set v06 [ string trimleft $v06 ]
set v12 [ string trimleft $v12 ]

if { $v0 == "M" && $v18 != "M" } {
set v0 $v18
}
if { $v1 == "M" && $v00 != "M" } {
set v1 $v00
}
if { $v2 == "M" && $v06 != "M" } {
set v2 $v06
}
if { $v3 == "M" && $v12 != "M" } {
set v3 $v12
}

} elseif { $dqc_preprocessor_basetime == "00Z" ||
$dqc_preprocessor_basetime == "00z"} {

scan $line {%s %s %s %s %s %s %s %s} sheftype hb5 date rest v00 v06 v12 v18

set v00 [ string trimright $v00 / ]
set v06 [ string trimright $v06 / ]
set v12 [ string trimright $v12 / ]
set v18 [ string trimright $v18 / ]
set v00 [ string trimleft $v00 ]
set v06 [ string trimleft $v06 ]
set v12 [ string trimleft $v12 ]
set v18 [ string trimleft $v18 ]

if { $v0 == "M" && $v00 != "M" } {
set v0 $v00
}
if { $v1 == "M" && $v06 != "M" } {
set v1 $v06
}
if { $v2 == "M" && $v12 != "M" } {
set v2 $v12
}
if { $v3 == "M" && $v18 != "M" } {
set v3 $v18
}

} elseif { $dqc_preprocessor_basetime == "06Z" ||
$dqc_preprocessor_basetime == "06z"} {

scan $line {%s %s %s %s %s %s %s %s} sheftype hb5 date rest v06 v12 v18 v00

set v06 [ string trimright $v06 / ]
set v12 [ string trimright $v12 / ]
set v18 [ string trimright $v18 / ]
set v00 [ string trimright $v00 / ]
set v06 [ string trimleft $v06 ]
set v12 [ string trimleft $v12 ]
set v18 [ string trimleft $v18 ]
set v00 [ string trimleft $v00 ]

if { $v0 == "M" && $v06 != "M" } {
set v0 $v06
}
if { $v1 == "M" && $v12 != "M" } {
set v1 $v12
}
if { $v2 == "M" && $v18 != "M" } {
set v2 $v18
}
if { $v3 == "M" && $v00 != "M" } {
set v3 $v00
}

} else {

# Parse the line for the four freezing level values.
scan $line {%s %s %s %s %s %s %s %s} sheftype hb5 date rest v12 v18 v00 v06

set v12 [ string trimright $v12 / ]
set v18 [ string trimright $v18 / ]
set v00 [ string trimright $v00 / ]
set v06 [ string trimright $v06 / ]
set v12 [ string trimleft $v12 ]
set v18 [ string trimleft $v18 ]
set v00 [ string trimleft $v00 ]
set v06 [ string trimleft $v06 ]

if { $v0 == "M" && $v12 != "M" } {
set v0 $v12
}
if { $v1 == "M" && $v18 != "M" } {
set v1 $v18
}
if { $v2 == "M" && $v00 != "M" } {
set v2 $v00
}
if { $v3 == "M" && $v06 != "M" } {
set v3 $v06
}
}
}

if { $dqc_preprocessor_basetime == "18Z" ||
$dqc_preprocessor_basetime == "18z"} {

append out ".E Z$stn $otdate1 DH18/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"

} elseif { $dqc_preprocessor_basetime == "00Z" ||
$dqc_preprocessor_basetime == "00z"} {

append out ".E Z$stn $otdate1 DH00/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"

} elseif { $dqc_preprocessor_basetime == "06Z" ||
$dqc_preprocessor_basetime == "06z"} {

append out ".E Z$stn $otdate1 DH06/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"

} else {

append out ".E Z$stn $otdate1 DH12/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"
}

incr i
}

#
# Close the freezing level data file.
close $fileId

puts -nonewline $out

}

# Write the Freezing Level File.
write_file $ofile $out

#Continue with the next day to process.
}

@@ -1,63 +0,0 @@
#!/bin/ksh

# Bias Table Message File Generation Script

#
# This allows you to call this script from outside of ./whfs/bin
RUN_FROM_DIR=`dirname $0`

# Export the value of FXA_HOME here. This allows read_env.sh to be
# sourced before set_hydro_env. This prevents set_hydro_env environment
# and token settings from being overwritten by read_env.sh.
#export FXA_HOME=/awips/fxa

# Set up the AWIPS environment.
#. $FXA_HOME/readenv.sh

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env

export FXA_LOCAL_SITE=$(get_apps_defaults fxa_local_site)
TZ=GMT
export TZ

#
# setup log file
#

BIASMESSLOGDIR=$(get_apps_defaults rfcwide_logs_dir)

fnm=$BIASMESSLOGDIR/biasmesgen.log
tmp=$BIASMESSLOGDIR/biasmesgen.tmp

#
# save latest entries in log file
#

if [[ -s $fnm ]]
then
tail -1200 $fnm > $tmp
mv $tmp $fnm
fi

#
# begin write to log
#

echo "-------------------------------------------" >> $fnm
Dte=`date -u`
echo "Begin at "$Dte >> $fnm

#
# Generate Bias Table Message
#

runso rary.ohd.pproc biasmesgen_main >> $fnm

#
# end write to log
#

Dte=`date -u`
echo "End at "$Dte >> $fnm
exit 0

@@ -1,95 +0,0 @@
#!/bin/sh

###############################################################################
# run_convert_basin_format
#
# This script converts a CBRFC formatted basin file to whfs formatted file.
#
# This script takes three arguments:
#
# <input_file> : The cbrfc formatted basin file to be converted
#
# <output_file> : The whfs formatted output file
#
# <input_file_directory> : The location of the input file
#
# The reformatted output file will be placed in the directory pointed to
# by the whfs_geodata_dir token.
#
# Modification History
# June 5, 2006 Ram Varma Original Coding.
#
###############################################################################

#
# The user must supply the input file name, output file name and location of the input file



if [[ $# -ne 3 ]]
then
printf "Usage: run_convert_basin_format <input_file> <output_file> <input_file_directory> "
printf "\twhere input_file is the file to be converted"
printf "\tand output_file is the converted file"
printf "\tand the input file directory is the location of the source file"
exit 1;
fi

input_file=$1
output_file=$2
input_file_directory=$3

#
# Set up environment variables
# Define FILENAME variable

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export WHFS_GEODATA_DIR=$(get_apps_defaults whfs_geodata_dir)

printf "The geo data directory is: $WHFS_GEODATA_DIR"

printf "converting $input_file_directory/$input_file to whfs basin file format and storing it in $output_file"

$RUN_FROM_DIR/convert_basin_format.LX $input_file_directory/$input_file $output_file

printf "done converting..."

printf "checking for prior existence of $output_file in $WHFS_GEODATA_DIR/"

#
# Check for the existence of the output file. If it exists, warn the user
# that this file will be overwritten. If the choice is to overwrite,
# a backup of the original file will be created with a ".bak" extension.
# Give the user the chance to quit.
#
if [[ -a $WHFS_GEODATA_DIR/$output_file ]]
then

printf "The $WHFS_GEODATA_DIR/$output_file file exists.\n"
printf "It will be overwritten. Proceed?\n"
read overwrite?" (Y/N):"

while [[ $overwrite != "N" && $overwrite != "Y" ]]
do
read overwrite?" Please enter Y or N:"
done

if [ $overwrite == "N" ]
then
printf "The basin file conversion has been aborted.\n"
exit 1;
elif [ $overwrite == "Y" ]
then
printf "creating a backup of the original $WHFS_GEODATA_DIR/$output_file file"
printf "Moving the $WHFS_GEODATA_DIR/$output_file to $WHFS_GEODATA_DIR/$output_file.bak"
mv $WHFS_GEODATA_DIR/$output_file $WHFS_GEODATA_DIR/$output_file.bak
fi

fi

printf "\n"
printf "Copying $output_file in $WHFS_GEODATA_DIR/"

cp $output_file $WHFS_GEODATA_DIR/
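#
# Example invocation (hypothetical file names):
#   run_convert_basin_format cbrfc_basins.dat whfs_basins.dat /tmp/basin_input
#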

@@ -1,77 +0,0 @@
#!/bin/ksh

###############################################################################
# convert_dqc_climo_list
#
# This script converts a CBRFC-formatted climatology list into the format
# required by MPE Editor.
#
# This script takes two arguments:
#
# <input_file> : The path and name of the station list file to be
# converted.
# <area_id> : cbrfc, for an example.
#
#
# Modification History
# June 5, 2006 Ram Varma Original Coding.
#
###############################################################################

#
# The user must supply the input file name and the area_id.

#
if [[ $# -ne 2 ]]
then
print "Usage: convert_dqc_climo_list <input_file> <area_id> "
print "\twhere input_file is the path and name of the input file"
print "\t area_id cbrfc, as an example"
exit 1;
fi

input_file=$1
area_id=$2

#
# Set up environment variables
# Define FILENAME variable

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export MPE_CLIMO_DIR=$(get_apps_defaults mpe_climo_dir)

FILENAME=$MPE_CLIMO_DIR/${area_id}_station_climo_list

print "Reading $input_file"
print "Reformatted climo station list will be written to $FILENAME"

rm -f $FILENAME

#
# Get a count of the number of PPM records in the climo file.
export num_ppm_records=`awk '{print $2}' $input_file | \
awk 'BEGIN{count=0} /^PPM/ {count=count+1} END{print count}'`

#
# Get a count of the number of TAI records in the climo file.
export num_tai_records=`awk '{print $2}' $input_file | \
awk 'BEGIN{count=0} /^TAI/ {count=count+1} END{print count}'`

print "$input_file contains $num_ppm_records PPM records"
print "$input_file contains $num_tai_records TAI records"

echo $num_ppm_records >> $FILENAME
awk '{if(FNR <= ENVIRON["num_ppm_records"]) print $0}' $input_file \
>> $FILENAME

#
# There are two tai climo records for each station.
((num_temp_records=$num_tai_records/2))

echo $num_temp_records >> $FILENAME
awk '{if(FNR > ENVIRON["num_ppm_records"] && \
FNR <= ( ENVIRON["num_tai_records"] + ENVIRON["num_ppm_records"])) \
print $0}' $input_file >> $FILENAME
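#
# Illustrative output layout (hypothetical counts): with 2 PPM records
# and 4 TAI records (two per station) in the input, $FILENAME holds:
#   2
#   ...the 2 PPM records...
#   2
#   ...the 4 TAI records...
#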

exit 0

@@ -1,133 +0,0 @@
#!/bin/ksh

###############################################################################
# convert_dqc_station_list
#
# This script converts a CBRFC-formatted station list into the format
# expected by MPE Editor.
#
# This script takes three arguments:
#
# <input_file> : The path and name of the station list file to be
# converted.
# <pph_flag> : Y or N. Indicates whether or not to place the PPH
# station list at the top of the file. If this is a
# subarea station list, then this should be N. If this is
# the master DailyQC station list then this should be Y.
# <area_id> : cbrfc, for an example.
#
# The reformatted station list will be placed in the directory pointed to
# by the mpe_station_list_dir token. This file will be named as
# <area_id>_station_list.
#
# Modification History
# May 24, 2006 Bryon Lawrence Original Coding.
#
###############################################################################

#
# The user must supply the input file name, the Y or N flag indicating
# whether or not to include the PPH station list and the area_id.

#
if [[ $# -ne 3 ]]
then
print "Usage: convert_dqc_station_list <input_file> <pph_flag> <area_id> "
print "\twhere input_file is the path and name of the input file"
print "\t pph_flag = Y to load PPH station list"
print "\t N to not load PPH station list"
print "\t area_id cbrfc, as an example"
exit 1;
fi

input_file=$1
pph_flag=$2
area_id=$3

#
# Set up environment variables
# Define FILENAME variable
#

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export MPE_STATION_LIST_DIR=$(get_apps_defaults mpe_station_list_dir)
export DB_NAME=$(get_apps_defaults db_name)

FILENAME=$MPE_STATION_LIST_DIR/${area_id}_station_list

print "Reading $input_file"
print "Reformatted station list will be written to $FILENAME"

rm -f $FILENAME

if [[ $pph_flag = "Y" ]]
then
print "Including PPH station list."
else
print "Excluding PPH station list. "
fi

#Check if the PPH list needs to be appended.
if [[ $pph_flag = "Y" ]]
then
############################################################################
#
# Generate station list for 1 hr MPE
#
$POSTGRESQLBINDIR/psql -q -t -A -F ' ' $DB_NAME \
-f $RUN_FROM_DIR/hourly_precip_station_gen.sql \
> hourly_precip_station_list
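#
# psql flags used here: -q (quiet), -t (print rows only, no headers or
# footers), -A (unaligned output), -F ' ' (space as the field separator),
# so each station comes out as one plain space-delimited record.
#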

#
# Get count of records from above and write to station_list file
#
wc -l < hourly_precip_station_list > $FILENAME

#
# Write precip station info to station_list file
#
cat hourly_precip_station_list >> $FILENAME

fi

#
# Get a count of the number of PPD records in the CBRFC station file.
export num_ppd_records=`awk '{print $2}' $input_file | \
awk 'BEGIN{count=0} /^PPD/ {count=count+1} END{print count}'`

#
# Get a count of the number of TAI records in the CBRFC station file.
export num_tai_records=`awk '{print $2}' $input_file | \
awk 'BEGIN{count=0} /^TAI/ {count=count+1} END{print count}'`

#
# Get a count of the number of HZI records in the CBRFC station file.
export num_hzi_records=`awk '{print $2}' $input_file | \
awk 'BEGIN{count=0} /^HZI/ {count=count+1} END{print count}'`

print "$input_file contains $num_ppd_records PPD records"
print "$input_file contains $num_tai_records TAI records"
print "$input_file contains $num_hzi_records HZI records"

#
# Insert a dummy record for the PPH stations.
# Or insert the PPH block as created by the create station list
# script.
echo $num_ppd_records >> $FILENAME
awk '{if(FNR <= ENVIRON["num_ppd_records"]) print $0}' $input_file \
>> $FILENAME

echo $num_tai_records >> $FILENAME
awk '{if(FNR > ENVIRON["num_ppd_records"] && \
FNR <= ( ENVIRON["num_tai_records"] + ENVIRON["num_ppd_records"])) \
print $0}' $input_file >> $FILENAME

echo $num_hzi_records >> $FILENAME
awk '{if(FNR > (ENVIRON["num_tai_records"] + ENVIRON["num_ppd_records"])) \
print $0}' $input_file >> $FILENAME


rm -f hourly_precip_station_list

exit 0

@@ -1,26 +0,0 @@
#!/bin/ksh
# run_copygb

# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
export COPYGB_LOG_DIR=$(get_apps_defaults pproc_util_log_dir)
export PPROC_LOCAL_BIN=$(get_apps_defaults pproc_local_bin)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export WHFS_BIN_DIR=$(get_apps_defaults whfs_bin_dir)

# Create error log file name
logname=COPYGB_`date -u +error.%Y%m%d%H%M%S`
errorlog=$COPYGB_LOG_DIR/$logname

#
# Run copygb with parameters specified
# Delete error log if empty
#

runso rary.ohd.pproc copygb_main_ $@ 2>>$errorlog
echo copygb error log located at: $errorlog

exit 0

@@ -1,12 +0,0 @@
#!/bin/ksh
# run_create_gage_file

# This script is called from Build_Station_List.
# It should not be run alone. It assumes that set_hydro_env has already
# been sourced.

# This allows this script to be run from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env

runso rary.ohd.pproc create_freezing_station_list_main

@@ -1,11 +0,0 @@
#!/bin/ksh
# run_create_gage_file

# This allows this script to be run from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# Set up SOME environmental variables required by hmap_mpe.
. $RUN_FROM_DIR/../../set_hydro_env
export RFCWIDE_BEAMHEIGHT_DIR=$(get_apps_defaults rfcwide_beamheight_dir)

runso rary.ohd.pproc create_mpe_beam_height_main $RFCWIDE_BEAMHEIGHT_DIR

@@ -1,16 +0,0 @@
#!/bin/ksh
# run_create_mpe_climo_lists
#
# Usage: run_create_mpe_climo_lists

# This allows this script to be run from outside of ./standard/bin
RUN_FROM_DIR=`dirname $0`

# Set up SOME environmental variables.
. $RUN_FROM_DIR/../../set_hydro_env
export MPE_EDITOR_LOGS_DIR=$(get_apps_defaults mpe_editor_logs_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

#$RUN_FROM_DIR/dqc_preproc_setup
#$PPROC_BIN/dqc_preproc_setup_main
runso rary.ohd.pproc dqc_preproc_setup_main

@@ -1,12 +0,0 @@
#!/bin/ksh
# run_create_gage_file

# This allows this script to be run from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# Set up SOME environmental variables required by hmap_mpe.
. $RUN_FROM_DIR/../../set_hydro_env
export RFCWIDE_GAGELOC_DIR=$(get_apps_defaults rfcwide_gageloc_dir)
export DB_NAME=$(get_apps_defaults db_name)

runso rary.ohd.pproc create_mpe_gage_file_main -d$DB_NAME $RFCWIDE_GAGELOC_DIR

@@ -1,133 +0,0 @@
#!/bin/ksh
#
# Script to generate a list of stations and related information for
# (1) 1 hr precip stations for hourly MPE
# (2) 6 hr/daily precip stations
# (3) 6 hr and max/min temperature stations
# (4) 6 hr freezing level grid points
#

# Inputs:
# MPE_STATION_LIST_DIR - defined in set_hydro_env
# - dir containing station info list
#
# MPE_SITE_ID - defined in set_hydro_env
# - used to create filename containing station info
#
# Output
# The station information is written to a file named "station_info_list".
# This file is located in the directory pointed to by the mpe_station_list_dir token.
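#
# Sketch of the resulting layout (illustrative counts only): each station
# block below is preceded by its record count from "wc -l"; the freezing
# level block is appended whole and already carries its own count, e.g.:
#   117                 <- 1 hr precip stations
#   ...                 <- their records
#   482                 <- 6 hr/daily precip stations
#   ...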
#
# Temporary Files
# This script writes results of queries to the following temporary files:
# temperature_station_list
# precip_station_list
# These files are deleted at the end of the script.
####################################################################################
#
# Set up environment variables
# Define FILENAME variable
#

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export MPE_STATION_LIST_DIR=$(get_apps_defaults mpe_station_list_dir)
export DB_NAME=$(get_apps_defaults db_name)
export MPE_SITE_ID=$(get_apps_defaults mpe_site_id)

FILENAME=$MPE_STATION_LIST_DIR/${MPE_SITE_ID}_station_list
####################################################################################
#
# Generate station list for 1 hr MPE
#
$POSTGRESQLBINDIR/psql -q -t -A -F ' ' $DB_NAME -f $RUN_FROM_DIR/hourly_precip_station_gen.sql > $RUN_FROM_DIR/hourly_precip_station_list

#
# Get count of records from above and write to station_list file
#
wc -l < $RUN_FROM_DIR/hourly_precip_station_list > $FILENAME

#
# Write precip station info to station_list file
#
cat $RUN_FROM_DIR/hourly_precip_station_list >> $FILENAME

####################################################################################
#
# Generate station list for 6 hr and 24 hr precip stations
#
# Generate station info for precip stations and write results to temporary file
#
$POSTGRESQLBINDIR/psql -q -t -A -F ' ' $DB_NAME -f $RUN_FROM_DIR/precip_station_gen.sql > $RUN_FROM_DIR/precip_station_list

#
# Get count of records from above and write to station_list file
#
wc -l < $RUN_FROM_DIR/precip_station_list >> $FILENAME

#
# Write precip station info to station_list file
#
cat $RUN_FROM_DIR/precip_station_list >> $FILENAME

####################################################################################
#
# Generate station list for 6 hr and max/min temperature stations
#
# Generate station info for temperature stations and write results to temporary file
#
$POSTGRESQLBINDIR/psql -q -t -A -F ' ' $DB_NAME -f $RUN_FROM_DIR/temperature_station_gen.sql > $RUN_FROM_DIR/temperature_station_list

#
# Get count of records from above and write to station_list file
#
wc -l < $RUN_FROM_DIR/temperature_station_list >> $FILENAME

#
# Write temperature station info to station_list file
#
cat $RUN_FROM_DIR/temperature_station_list >> $FILENAME

#
# Delete temporary files
#

rm -f $RUN_FROM_DIR/temperature_station_list

rm -f $RUN_FROM_DIR/precip_station_list

rm -f $RUN_FROM_DIR/hourly_precip_station_list

####################################################################################
#
# Generate grid points for freezing level data
#

#
# Create the temporary file of freezing level stations
#
$RUN_FROM_DIR/run_create_freezing_station_list

#
# Concatenate this temporary file to the end of the
# station file list.
#
cat $MPE_STATION_LIST_DIR/${MPE_SITE_ID}_freezing_station_list >> $FILENAME

#
# Create the ruc.pl script from the ruc.pl.template file.
# This file is responsible for retrieving freezing level
# data from the RUC80 model for grid points contained in the
# office's MPE forecast area.
#
$RUN_FROM_DIR/run_freezing_station_setup

#
# Remove the temporary freezing station list file.
#
rm -f $MPE_STATION_LIST_DIR/${MPE_SITE_ID}_freezing_station_list

#
# Done
#
exit 0

@@ -1,75 +0,0 @@
#!/bin/ksh

# run_create_prism
#
# Bryon Lawrence October 5, 2006 OHD
#
# Purpose:
#
# This is the run script for the MPEClimo PRISM generating program. It takes
# the national precipitation and temperature PRISM files available from Oregon State
# University and crops them to an office's MPE forecast area. These cropped PRISM
# files are stored in XMRG format. They can be displayed in MPE Editor.
#
# This script requires three arguments:
#
# 1) Input Prism File Prefix
# 2) Output Prism File Suffix
# 3) Temperature/Precipitation PRISM flag. This may be either "t" or "p"
#
# The prism_input_dir token provides the path to the raw PRISM files.
# The prism_output_dir token provides the path to the cropped XMRG PRISM files.
#
# This routine expects there to be 13 raw PRISM files, one for each month and one
# for the entire year. This routine will create XMRG files corresponding to the
# raw PRISM files.
#
# This routine assumes that the input prism files have the suffix "_MM" where MM
# is the 2 digit month number. When generating the output PRISM files, this
# routine will append a "_MMM" to each month's XMRG PRISM filename. MMM is the
# lower case 3 character month abbreviation.
#
# For example, run_create_prism us_tmax prism_max_temp t
#
# Will look for the files
#
# us_tmax_01 us_tmax_04 us_tmax_07 us_tmax_10 us_tmax_14
# us_tmax_02 us_tmax_05 us_tmax_08 us_tmax_11
# us_tmax_03 us_tmax_06 us_tmax_09 us_tmax_12
#
# in the input directory specified by the prism_input_dir token.
#
# It will generate the following files in the prism_output_dir token:
#
# prism_max_temp_jan prism_max_temp_apr prism_max_temp_jul prism_max_temp_oct
# prism_max_temp_feb prism_max_temp_may prism_max_temp_aug prism_max_temp_nov
# prism_max_temp_mar prism_max_temp_jun prism_max_temp_sep prism_max_temp_dec
# prism_max_temp_ann
#
#
#

if [[ $# -ne 3 ]]
then
print "Usage: run_create_prism <prism_input_file_prefix>"
print "       <prism_output_file_prefix>"
print "       <t|p>"
exit 1;
fi
RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export SYS_JAVA_DIR=/awips2/java
export SYS_JAVA_BIN_DIR=$SYS_JAVA_DIR/bin

# Temporarily set here.
export CLASSPATH=${PPROC_BIN}/prism.jar

# Temporarily set here.
export prism_input_dir=/awips2/awipsShare/hydroapps/precip_proc/local/data/log/disagg
export prism_output_dir=/awips2/awipsShare/hydroapps/precip_proc/local/data/app/mpe/prism
#export prism_input_dir=/fs/hseb/whfs/site_data/mpe_data/PRISM
#export prism_output_dir=/fs/hseb/whfs/cbrfc_historical/cbrfc/mpe_editor/local/data/app/MPE/PRISM

# Start the MPEClimo program
$SYS_JAVA_BIN_DIR/java -classpath ${CLASSPATH} ohd.hseb.prism.MPEClimo $1 $2 $3

@@ -1,10 +0,0 @@
#!/bin/ksh
# run_create_gage_file

# This allows this script to be run from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# Set up SOME environmental variables required by hmap_mpe.
. $RUN_FROM_DIR/../../set_hydro_env

runso rary.ohd.pproc read_topo_main

@@ -1,19 +0,0 @@
#!/usr/bin/ksh

# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

. $RUN_FROM_DIR/../../set_hydro_env
export UTILTRI_DIR=$(get_apps_defaults rfcwide_utiltriangles_dir)
utiltri=$UTILTRI_DIR/utiltriangles
if [[ -f $utiltri ]]
then
echo triangle files exist!
exit 0
fi
echo creating triangles .....

#$RUN_FROM_DIR/create_triangles.LX
runso rary.ohd.pproc create_triangles_main

echo Finished creating triangles !!!!!

@@ -1,28 +0,0 @@
#!/bin/ksh
# run_disagg
# Oct 31, 2002

RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
export DISAGG_LOG_DIR=$(get_apps_defaults disagg_log_dir)
export HYDRO_PUBLICBIN=$(get_apps_defaults hydro_publicbin)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

#----------------------------------------------------------------------
# Don't fire up another instance if one is underway
#
# AMIRUNNING_DIR env is REQUIRED by name by amirunning script
#

#AMIRUNNING_DIR=$DISAGG_LOG_DIR
#echo $HYDRO_PUBLICBIN
#. $HYDRO_PUBLICBIN/amirunning

#if [[ $AmIRunning = "yes" ]]; then
#echo Run_disagg script canceled because disagg is currently running
#exit 1
# fi

$PPROC_BIN/disagg$OS_SUFFIX

@@ -1,37 +0,0 @@
# ============================================================================
# pgm: run_disagg_fieldgen .. test ver of script to run run_disagg/run_mpe_fieldgen
#
# use: test_run_all [num-hours]
#
# in: num-hours .... (optional) if given, input into mpe_fieldgen as the
# in: number of hours, else default to 3
# in: CURRENTLY NOT IMPLEMENTED
#
# rqd: commands - run_disagg, run_mpe_fieldgen, set_hydro_env
#
# ver: 20021022
# ============================================================================
type dd_help 1>/dev/null 2>&1 && . dd_help 2>/dev/null

Optns="$@"

# This allows you to call this script from outside of ./whfs/bin
# RUN_FROM_DIR=`dirname $0`
# WhfsDir=$RUN_FROM_DIR/../..

## -------------------------------------- set current dir for scripts

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export PPROC_BIN=$(get_apps_defaults pproc_bin)

## -------------------------------------- run run_disagg
$PPROC_BIN/run_disagg $Optns
RtnCod=$?

if [ "$RtnCod" != 0 ]; then exit 1; fi

## -------------------------------------- run run_mpe_fieldgen

$PPROC_BIN/run_mpe_fieldgen $Optns

@@ -1,73 +0,0 @@
#!/bin/ksh
# run_dqc_preprocessor
#
# Usage: run_dqc_preprocessor [-d num_days] [-t run_date] [-a sub_areas] [-z]
# where num_days = NN
# run_date = YYYYMMDD
# sub_areas = sub1,sub2

# This allows this script to be run from outside of ./standard/bin
RUN_FROM_DIR=`dirname $0`

# Set up SOME environmental variables.
. $RUN_FROM_DIR/../../set_hydro_env
. $RUN_FROM_DIR/../../check_app_context

export MPE_EDITOR_LOGS_DIR=$(get_apps_defaults mpe_editor_logs_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

#
# Initialize the parameters
#
NUM_DAYS=10
RUN_DATE=`date -u +%Y%m%d`
SUBAREA_LIST="ALL"
SET_ZERO="OFF"

#
# Setup log file
#
LOG_DATE=`date -u +%Y%m%d%H%M%S`
LOGNAME=$MPE_EDITOR_LOGS_DIR/mpe_preprocessor_$LOG_DATE

#
# Parse the command parameters
#
args=`getopt d:t:a:z $*`
if [ $? != 0 ]
then
echo "Usage: run_dqc_preprocessor [-d num_days] [-t run_date] [-a sub_areas] [-z]"
echo " num_days = N"
echo " run_date = YYYYMMDD"
echo " sub_areas = sub1,sub2,..."
echo " z option = set precip value to zero"
exit 1
fi

set -- $args
for i
do
case "$i" in
-d) shift; NUM_DAYS="$1"; shift;;
-t) shift; RUN_DATE="$1"; shift;;
-a) shift; SUBAREA_LIST="$1"; shift;;
-z) shift; SET_ZERO="ON";;
esac
done
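#
# Example invocation (hypothetical values): process 5 days ending at
# 20080318 for subareas sub1 and sub2, presetting precip values to zero:
#   run_dqc_preprocessor -d 5 -t 20080318 -a sub1,sub2 -z
#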

if [ $SET_ZERO = "ON" ]
then
if [ $SUBAREA_LIST = "ALL" ]
then
runso rary.ohd.pproc dqc_preproc_main -d$NUM_DAYS -t$RUN_DATE -z > $LOGNAME 2>&1
else
runso rary.ohd.pproc dqc_preproc_main -d$NUM_DAYS -a$SUBAREA_LIST -t$RUN_DATE -z > $LOGNAME 2>&1
fi
else
if [ $SUBAREA_LIST = "ALL" ]
then
runso rary.ohd.pproc dqc_preproc_main -d$NUM_DAYS -t$RUN_DATE > $LOGNAME 2>&1
else
runso rary.ohd.pproc dqc_preproc_main -d$NUM_DAYS -t$RUN_DATE -a$SUBAREA_LIST > $LOGNAME 2>&1
fi
fi

@@ -1,41 +0,0 @@
# ============================================================================
# pgm: run_fieldgen_disagg_fieldgen .. script to run run_mpe_fieldgen/run_disagg
#      /run_mpe_fieldgen
#
# in: num-hours .... (optional) if given, input into mpe_fieldgen as the
# in: number of hours, else default to 3
# in: CURRENTLY NOT IMPLEMENTED
#
# rqd: commands - run_mpe_fieldgen, run_disagg, run_mpe_fieldgen, set_hydro_env
#
# ver: 20030113
# ============================================================================
type dd_help 1>/dev/null 2>&1 && . dd_help 2>/dev/null

Optns="$@"

# This allows you to call this script from outside of ./whfs/bin
# RUN_FROM_DIR=`dirname $0`
# WhfsDir=$RUN_FROM_DIR/../..

## -------------------------------------- set current dir for scripts

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env
export PPROC_BIN=$(get_apps_defaults pproc_bin)

## -------------------------------------- run run_mpe_fieldgen
$PPROC_BIN/run_mpe_fieldgen $Optns
#RtnCode=$?
#if [ "$RtnCode" != 0 ]; then exit 1; fi

## -------------------------------------- run run_disagg
$PPROC_BIN/run_disagg $Optns
RtnCod=$?

if [ "$RtnCod" != 0 ]; then exit 1; fi

## -------------------------------------- run run_mpe_fieldgen

$PPROC_BIN/run_mpe_fieldgen $Optns

@@ -1,76 +0,0 @@
#!/bin/ksh
##############################################################################
# run_freezing_level
#
# Description: Runs the scripts which extract freezing level data from
# the RUC80 model and store it in SHEF format
# for use in the MPE Editor GageQC tools.
#
# This script is setup to be run from the cron. It will
# produce log files in the mpe_editor directory in
# /awips/hydroapps/precip_proc/local/data/log or as
# specified by the mpe_editor_logs_dir token.
#
# Usage:
# run_freezing_level [number of days to process]
#
# Revision History:
#
# April 19, 2006 Bryon Lawrence Original Coding
# October 16, 2007 Bryon Lawrence Added option to supply the
# number of days of freezing level
# data to process.
# October 17, 2007 Bryon Lawrence Modified how the freezing level
# logs are purged. Setup so
# that daily log files are created,
# and they are purged by
# the purge_mpe_files script.
##############################################################################
#

#
# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

#
# Set up environment variables
. $RUN_FROM_DIR/../../set_hydro_env
export MPE_EDITOR_LOGS_DIR=$(get_apps_defaults mpe_editor_logs_dir)

#
# Check for the optional command line argument specifying the number
# of days to process. If the number of days is not specified
# then default to 1.
if [ $# -eq 1 ]
then
number_of_days=$1
else
number_of_days=1
fi

#
# Set up the log directory.
freezinglog_file=`date +freezing_level_log_%m%d`
freezinglog=$MPE_EDITOR_LOGS_DIR/${freezinglog_file}

#
# Write a startup message to the log.
date_string=`date`
echo "############################################################" >> \
$freezinglog
echo "Running freezing level data retrieval script at $date_string" \
>> $freezinglog

#
# Run the scripts to retrieve the freezing level data from the RUC80
# model for the model grid points which are located within the
# office's MPE forecast area.
${RUN_FROM_DIR}/ruc.tcl $RUN_FROM_DIR $number_of_days >> $freezinglog 2>&1

#
# Write a completion message to the freezing level log.
date_string=`date`
echo "Done retrieving freezing level data at $date_string" >> $freezinglog

#
# End of Script.

@@ -1,134 +0,0 @@
#!/usr/bin/ksh

###############################################################################
# FILENAME: run_freezing_station_setup
# GENERAL INFORMATION:
# DESCRIPTION: Using the template ruc.pl.template, this script will
# produce a version of ruc.pl which contains the
# RUC80 points within the office's MPE forecast area.
# The ruc.pl script is used by ruc.tcl to extract
# RUC80 data from the AWIPS netCDF files.
#
# This routine reads the freezing station list in the
# /awips/hydroapps/precip_proc/local/data/app/MPE/station_lists
# directory. It formats the latitude/longitude coordinates into
# the format required by the ruc.pl script. It then inserts
# these latitude/longitude coordinates into the ruc.pl script
# as well as updating references to the count of freezing
# stations.
#
# The ruc.pl.template file should never be edited.
#
# This script is meant to be called from the Build_Station_List
# script. It should not be run alone. It assumes that
# /awips/hydroapps/set_hydro_env has already been sourced.
#
#
# ORIGINAL AUTHOR: Bryon Lawrence
# CREATION DATE: April 3, 2006
# ORGANIZATION: OHD/HSEB
# MACHINE: Linux
# MODIFICATION HISTORY:
# DATE PROGRAMMER DESCRIPTION/REASON
# April 3, 2006 Bryon Lawrence Initial Coding
################################################################################
#

# This allows this script to be run from outside of precip_proc/bin.
RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../set_hydro_env

export MPE_SITE_ID=$(get_apps_defaults mpe_site_id)
export MPE_STATION_LIST_DIR=$(get_apps_defaults mpe_station_list_dir)

#
# Retrieve the directory to read the freezing level station list data from.
#
FREEZING_LIST_PATH=$MPE_STATION_LIST_DIR

#
# Retrieve the identifier of the office.
#
SITE_ID=$MPE_SITE_ID

#
# Create the freezing station file name.
#
FREEZING_STATION_FILE=$FREEZING_LIST_PATH/${SITE_ID}_freezing_station_list

#
# Create the file to temporarily contain the lat/lon coords. These
# will be inserted into the ruc.pl script.
#
FREEZING_STATION_COORD_FILE=$FREEZING_LIST_PATH/${SITE_ID}_freezing_coords

#
# Check for the existence of the ruc.pl file. If it exists, warn the user
# that this file will be overwritten. Give the user the chance to quit.
#
if [[ -a $RUN_FROM_DIR/ruc.pl ]]
then

printf "The $RUN_FROM_DIR/ruc.pl file exists.\n"
printf "It will be overwritten. Proceed?\n"
read overwrite?" (Y/N):"

while [[ $overwrite != "N" && $overwrite != "Y" ]]
do
read overwrite?" Please enter Y or N:"
done

if [ $overwrite == "N" ]
then
printf "The freezing station setup has been aborted.\n"
exit 1;
fi

fi

#
# Create the list of RUC80 point latitude/longitude coordinates in the format
# required by the ruc.pl script. Skip the first record in the freezing
# station list file. This is the number of records in the file.
#
awk 'BEGIN{count=0}
{if(FNR > 1){print "$lat["count"] = "$3";\n$lon["count"] = -"$4";";
count=count+1}}' $FREEZING_STATION_FILE > $FREEZING_STATION_COORD_FILE

#
# Retrieve the number of records in the coordinate file.
#
num_coord_records=`wc -l $FREEZING_STATION_COORD_FILE | cut -f1 -d' '`

num_coord_records=`echo $num_coord_records | xargs -i expr {} / 2`
num_coord_records=`echo $num_coord_records | xargs -i expr {} - 1`
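#
# Why /2 and -1 (a reading of the arithmetic above): each RUC80 point
# contributes two lines to the coordinate file ($lat[n] and $lon[n]), so
# lines/2 is the point count, and subtracting 1 matches the zero-based
# inclusive loop bound that replaces ##### in ruc.pl.template.
#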

#
# Insert the lat/lon coords into the ruc.pl script.
#
echo $FREEZING_STATION_COORD_FILE | \
xargs -i sed '3r{}' $RUN_FROM_DIR/ruc.pl.template > ruc.pl.temp

#
# Modify the ruc.pl script to contain the correct number of lat/lon
# coordinate pairs to process.
#
echo $num_coord_records | xargs -i sed 's/#####/{}/g' ruc.pl.temp > ruc.pl

#
# Remove the temporary file.
#
rm -f ruc.pl.temp
rm -f $FREEZING_STATION_COORD_FILE

#
# Change the permissions on the ruc.pl script so that they are executable
# by all.
#
chmod 755 ruc.pl

#
# Done
#

exit 0

@@ -1,41 +0,0 @@
#!/bin/ksh
# run_gen_areal_ffg
# July 19, 2002

# This allows you to call this script from outside of ./whfs/bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export DPA_LOG_DIR=$(get_apps_defaults dpa_log_dir)

#
# Check if gaff execution is desired
#

GAFFEXEC=$(get_apps_defaults gaff_execution)

if [[ $GAFFEXEC = "OFF" || $GAFFEXEC = "off" ]]; then
exit 1
fi

#
# Setup log file
#

Date=`date -u +%m%d`
LOG_FILE=$DPA_LOG_DIR/gen_areal_ffg_log_$Date

#
# Run gen_areal_ffg from the appropriate directory
#

runso rary.ohd.pproc gen_areal_ffg_main >> $LOG_FILE

# Write ending info to log file


DTZ=`date -u +%a@%H:%M:%S`
echo $DTZ: Completed job. >>$LOG_FILE
echo "-------------" >>$LOG_FILE

@@ -1,36 +0,0 @@
|
|||
#!/bin/ksh
|
||||
#
|
||||
# process_qpe_files
|
||||
|
||||
# This allows you to call this script from outside of ./precip_proc/bin
|
||||
RUN_FROM_DIR=`dirname $0`
|
||||
|
||||
# set up SOME environment variables for WHFS applications
|
||||
. $RUN_FROM_DIR/../../set_hydro_env
|
||||
export MPE_LOG_DIR=$(get_apps_defaults rfcwide_logs_dir)
|
||||
export PPROC_BIN=$(get_apps_defaults pproc_bin)
|
||||
|
||||
# GAD_DIR is directory which holds the gad program (get_apps_defaults.LX)
|
||||
GAD_DIR=/awips/hydroapps/public/bin
|
||||
|
||||
# Create error log file name
|
||||
logname=`date -u +error.%Y%m%d%H%M%S`
|
||||
errorlog=$MPE_LOG_DIR/$logname
|
||||
|
||||
#
|
||||
# Run mpe_fieldgen for the number of hours specified
|
||||
# Delete error log if empty
|
||||
#
|
||||
|
||||
RFC_LIST=XXXXX,YYYYY
|
||||
DURATION_LIST=1,3,6
|
||||
|
||||
export geo_data=/awips/hydroapps/geo_data
|
||||
export st3_rfc=host
|
||||
export gaq_input_dir=.
|
||||
export gaq_output_xmrg_dir=.
|
||||
export gaq_output_netcdf_dir=.
|
||||
|
||||
$PPROC_BIN/gen_areal_qpe$OS_SUFFIX -r$RFC_LIST -h$DURATION_LIST
|
||||
|
||||
#
|
|
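The header comment above promises "Delete error log if empty," but that step does not appear in the surviving lines of this script. Sibling scripts in this same commit (run_mpe_fieldgen, run_nc2grib) implement it as below; a sketch under that assumption:

#!/bin/ksh
# Sketch: remove a run's error log when nothing was written to it.
# [[ -s file ]] is true only when the file exists and is non-empty.
errorlog=/tmp/error.20240101    # hypothetical log path
if [[ ! -s $errorlog ]]
then
   rm -f $errorlog
fi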
@@ -1,272 +0,0 @@
#!/bin/ksh
# =====================================================================
#
#  run_gribit script
#
#  This script was previously known as "gribits"
#

# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env

export grib_out_dir=$(get_apps_defaults mpe_grib_dir)
export grib_in_dir=$(get_apps_defaults rfcwide_xmrg_dir)

USAGE="usage:run_gribit [-p pgm] [-b bin_dir] [-f file_set] [-i in_file]
 [-n in_dir] [-o out_file] [-t out_dir] [-c control] [-g geo_dir]
 [-h hrap_coord_file] [-d debug_level] [-l logfile] [-v verbose]
 [-s search_order]"

# =====================================================================
# Switch  Description                     Default value
# ------  ------------------------------  --------------------------
#  -p     Program name.                   gribit
#
#  -f     File set override for st3_rfc,  Sets indicated by st3_rfc,
#         ofs_level, ffg_level tokens.    ofs_level, ffg_level tokens.
#
#  -i     Input file name.                None.  Program prompt.
#
#  -n     Input directory and path OR:    Use input directory and path
#         sat - $rfcwide_satpre_dir       indicated by the $grib_in_dir
#               token                     token.
#         mpe - $rfcwide_xmrg_dir token
#         s3  - $ofs_griddb_dir token
#         ffg - $ffg_gridff_dir token
#         ro  - $ffg_gridro_dir token
#
#  -o     Output file name.               None.  Program prompt.
#
#  -t     Output directory and path.      Use output directory and path
#                                         indicated by the $grib_out_dir
#                                         token.
#
#  -c     Control:                        g - Encode XMRG into GRIB.
#         g - encode XMRG into GRIB
#         u - unpack GRIB and output to
#             log file
#         x - decode GRIB to XMRG file
#
#  -g     Geo directory (HRAP coordinates Use directory indicated by
#         directory)                      the $geo_st3_ascii token.
#
#  -h     HRAP coordinate domain file     coord.dat
#
#  -d     Debug level (d1, d2, or d3).    Debug off (level = 0)
#
#  -l     Log file on.                    Log file off.
#
#  -v     Verbose program prompts.        Suppress program prompts.
#
#  -s     Parameter table search sequence Parameter table search
#         - use user-defined table, if    sequence:
#           available                     - use built-in Table 2 NCEP
#         - use built-in Table 128        - use built-in Table 128
#         - use built-in Table 2 (NCEP)   - use user-defined table,
#                                           if available
#
#  Note:  -g and -h commands used only with command -c x.
#
# =====================================================================

# Initialize command line parameters

Pgm_cl=""
Fl_set_cl=""
Infil_cl=""
Nindir_cl=""
Otfil_cl=""
Toutdir_cl=""
Control_cl=""
Geo_cl=""
Hcoord_cl=""
Dbug_set_cl=""
Log_prt="off"
Vrb_set_cl="no"
Search_set_cl="0"

# check parameters were entered on the command line
while getopts :p:f:i:n:o:t:c:g:h:d:b:lvs cl_args ; do
   case $cl_args in
      p) Pgm_cl=$OPTARG;;
      f) Fl_set_cl=$OPTARG;;
      i) Infil_cl=$OPTARG;;
      n) Nindir_cl=$OPTARG;;
      o) Otfil_cl=$OPTARG;;
      t) Toutdir_cl=$OPTARG;;
      c) Control_cl=$OPTARG;;
      g) Geo_cl=$OPTARG;;
      h) Hcoord_cl=$OPTARG;;
      d) Dbug_set_cl=$OPTARG;;
      l) Log_prt="on";;
      v) Vrb_set_cl="yes";;
      s) Search_set_cl="1";;
     \?) bad_sw=YES;;
   esac
done

# set parameters for program execution

if [[ $Pgm_cl = "" ]] ; then
   Pgm="gribit.LX"
else
   Pgm=$Pgm_cl
fi

if [[ $Infil_cl != "" ]] ; then
   export grib_in_file=$Infil_cl
fi

if [[ $Otfil_cl != "" ]] ; then
   export grib_out_file=$Otfil_cl
fi

if [[ $Fl_set_cl = "" ]] ; then
   Rfcname=$(get_apps_defaults st3_rfc)
   Ofs_lev=$(get_apps_defaults ofs_level)
   Ffg_lev=$(get_apps_defaults ffg_level)
else
   Rfcname=$Fl_set_cl
   Ofs_lev=$Fl_set_cl
   Ffg_lev=$Fl_set_cl
   export st3_rfc=$Fl_set_cl
   export Rfcw_rfcname=$Fl_set_cl
   export ofs_level=$Ofs_lev
   export ffg_level=$Ffg_lev
fi

export hrap_domain_id=$Rfcname

# set input and output directories - can override output directory using -t
P1=""
if [[ $Nindir_cl = "sat" ]] ; then
   P1=$(get_apps_defaults pproc_dir)
   export grib_in_dir=$P1/"input/rfcwide"/$Rfcname/"satpre"
   export grib_out_dir=$(get_apps_defaults grib_dir)/output
elif [[ $Nindir_cl = "mpe" ]] ; then
   P1=$(get_apps_defaults ofs_files)
   export grib_in_dir=$P1/$Ofs_lev/"griddb/xmrg"
   export grib_out_dir=$(get_apps_defaults grib_dir)/output
elif [[ $Nindir_cl = "s3" ]] ; then
   P1=$(get_apps_defaults ofs_files)
   export grib_in_dir=$P1/$Ofs_lev/"griddb"
   export grib_out_dir=$(get_apps_defaults grib_dir)/output
elif [[ $Nindir_cl = "ffg" ]] ; then
   P1=$(get_apps_defaults ffg_files)
   P2=$(get_apps_defaults ffg_gff_level)
   export grib_in_dir=$P1/$Ffg_lev/$P2
   export grib_out_dir=$(get_apps_defaults ffg_out_dir)/grib
elif [[ $Nindir_cl = "ro" ]] ; then
   P1=$(get_apps_defaults ffg_files)
   P2=$(get_apps_defaults ffg_gro_level)
   export grib_in_dir=$P1/$Ffg_lev/$P2
   export grib_out_dir=$(get_apps_defaults ffg_out_dir)/grib
fi

if [[ $Control_cl != "" ]] ; then
   export grib_ctl=$Control_cl
fi

if [[ $Control_cl != "x" ]] ; then
   if [[ $P1 = "" && $Nindir_cl != "" ]] ; then
      export grib_in_dir=$Nindir_cl
   fi
   if [[ $Toutdir_cl != "" ]] ; then
      export grib_out_dir=$Toutdir_cl
   fi
elif [[ $Control_cl = "x" ]] ; then
   if [[ $P1 = "" && $Nindir_cl != "" ]] ; then
      export grib_out_dir=$Nindir_cl
   fi
   if [[ $Toutdir_cl != "" ]] ; then
      export grib_in_dir=$Toutdir_cl
   fi
   if [[ $Geo_cl = "" ]] ; then
      P3=$(get_apps_defaults geo_data)
      export hrap_domain_dir=$P3/$Rfcname/"ascii"
   else
      export hrap_domain_dir=$Geo_cl
   fi
   if [[ $Hcoord_cl = "" ]] ; then
      export hrap_domain_file="coord_"$Rfcname".dat"
   else
      export hrap_domain_file=$Hcoord_cl
   fi
fi

if [[ $Dbug_set_cl != "" ]] ; then
   export grib_debug_output=$Dbug_set_cl
fi

if [[ $Log_prt = "on" ]] ; then
   export grib_log_output="on"
fi

if [[ $Search_set_cl = "1" ]] ; then
   export grib_ptbl_search="1"
fi

if [[ $Vrb_set_cl = "yes" ]] ; then
   export grib_verbose="yes"
   #echo "Script gribits : Release $Release - $Date"
   echo "Script gribits"
   echo "The GRIB output directory is : $(get_apps_defaults grib_out_dir)"
   echo "The XMRG input directory is : $(get_apps_defaults grib_in_dir)"
   echo "The GRIB output file is : $(get_apps_defaults grib_out_file)"
   echo "The XMRG input file is : $(get_apps_defaults grib_in_file)"
   if [ "$P1" != "" ] ; then
      echo " $P1"
   fi
   if [ "$P2" != "" ] ; then
      echo " $P2"
   fi
   echo "Rfcname is : $Rfcname"
   if [[ $Control_cl = "x" ]] ; then
      echo "The hrap_domain directory is : $(get_apps_defaults hrap_domain_dir)"
      echo "The hrap_domain file is : $(get_apps_defaults hrap_domain_file)"
   fi
fi

#if [[ $Bin_set_cl = "" ]] ; then
#   Ex_dir=$(get_apps_defaults grib_dir)/bin/RELEASE
#   #Added by Guoxian Zhou 06/17/2003
#   [ -n "$(get_apps_defaults grib_rls)
# elif [[ $Bin_set_cl = "d" ]] ; then
#   Ex_dir=$(get_apps_defaults my_dir)/bin
#   #Added by Guoxian Zhou 06/17/2003
#   [ -n "$(get_apps_defaults my_rls)
# elif [[ $Bin_set_cl = "a" ]] ; then
#   Ex_dir=$(get_apps_defaults grib_dir)/bin/ARCHIVE
# elif [[ $Bin_set_cl = "m" ]] ; then
#   Ex_dir=$(get_apps_defaults
#   #Added by Guoxian Zhou 06/17/2003
#   [ -n "$(get_apps_defaults mgr_rls)
# elif [[ $Bin_set_cl = "ma" ]] ; then
#   Ex_dir=$(get_apps_defaults mgr_dir)/bin/ARCHIVE
# else
#   Ex_dir=$Bin_set_cl
# fi

Ex_dir=$(get_apps_defaults pproc_bin)

#if [ ! -d $Ex_dir ] ; then
#   print "ERROR: directory $Ex_dir not found."
#   exit 1
#   fi
#if [ ! -f $Ex_dir/$Pgm ] ; then
#   print "ERROR: program $Pgm not found in directory $Ex_dir."
#   exit 1
#   fi

if [[ $Vrb_set_cl = "yes" ]] ; then
   echo "Executing from : $Ex_dir/$Pgm"
fi

eval $Ex_dir/$Pgm
#runso rary.ohd.pproc gribit_main_main_
echo GRIBIT DONE
#===============================================================

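For readers unfamiliar with the option handling above: the leading `:` in the getopts spec enables silent error reporting (unknown switches land in the `\?` case), and each trailing `:` marks a switch that takes an argument, delivered in $OPTARG. A minimal self-contained sketch of the same pattern (the option letters here are arbitrary, not run_gribit's):

#!/bin/ksh
# Sketch of the getopts loop used above: -i takes an argument, -v is a flag.
infile=""
verbose="no"
while getopts :i:v opt ; do
   case $opt in
      i) infile=$OPTARG;;
      v) verbose="yes";;
     \?) echo "unknown switch -$OPTARG" >&2;;
   esac
done
shift $((OPTIND - 1))      # remaining words are positional arguments
echo "infile=$infile verbose=$verbose rest=$*"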
@@ -1,41 +0,0 @@
#!/bin/ksh
# run_hpe_fieldgen
#
# This script calls the High-resolution Precipitation Estimator (HPE)
# Field Generator executable. Adapted from run_mpe_fieldgen
#
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env

export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)

# Create error log file name
logname=`date -u +error.%Y%m%d%H%M%S`
normalprint=`date -u +print.%Y%m%d%H%M%S`
errorlog=$HPE_LOG_DIR/$logname
printlog=$HPE_LOG_DIR/$normalprint

#
# Run hpe_fieldgen for the number of runs specified

export HPE_TIMELAG=`get_apps_defaults hpe_timelag`

if [[ $1 -gt $HPE_TIMELAG ]]
then
   HPE_TIMELAG=$1
fi

runso rary.ohd.pproc hpe_fieldgen_main -l $HPE_TIMELAG >> $printlog 2>> $errorlog

# Delete error and normal print logs if empty
if [[ ! -s $errorlog ]]
then
   rm -f $errorlog
fi
if [[ ! -s $printlog ]]
then
   rm -f $printlog
fi
exit 0
@@ -1,131 +0,0 @@
#!/bin/ksh
#
# This script gathers the lightning data from the file in netcdf format and
# inserts them in the "lightning" table.
#

# umask 002  # which is 775 (rwxrwxr_x) or umask 001 = 776 (rwxrwxrw_)

RUN_FROM_DIR=`dirname $0`

. $RUN_FROM_DIR/../../set_hydro_env
export LIGHTNING_LOG_DIR=$(get_apps_defaults lightning_log_dir)
export DB_NAME=$(get_apps_defaults db_name)

echo Starting...
INFILE=`get_apps_defaults lightning_input_dir`

TEMPDIR=$LIGHTNING_LOG_DIR
export INFILE TEMPDIR

#
# set up log filename
#

TZ=GMT
export TZ

Dte=`date +%m%d%Y`
logfnm=$LIGHTNING_LOG_DIR/lightning_proc_log_$Dte
echo $logfnm

Starttime=`date`
echo "-------- Run_lightning_proc started at: $Starttime --------" >> $logfnm
echo "****** Operating system is " `uname` >> $logfnm
echo "****** Database name is $DB_NAME " >> $logfnm
### Get the current year, month, day
YY=`date '+%Y'`
MM=`date '+%m'`
DD=`date '+%d'`
HR=`date '+%H'`

### Compute 1 hour behind from current time
hour=`expr $HR - 1`

### Compute ending day of a month or month of a year
if [ $hour -le 9 ]
then
   hour=0${hour}
else
   hour=${hour}
fi

if [ $hour -lt 0 ]
then
   hour=23
   DD=`expr $DD - 1`

   if [ $DD -le 9 ]
   then
      DD=0${DD}
   else
      DD=${DD}
   fi

   if [ $DD -eq 0 ]
   then
      MM=`expr $MM - 1`
      if [ $MM -le 9 ]
      then
         MM=0${MM}
      else
         MM=${MM}
      fi

      if [ $MM -eq 0 ]
      then
         MM=12
         YY=`expr $YY - 1`
      fi
      case $MM in
         02) YRMOD=`expr $YY % 4`   #Leap year
             if [ $YRMOD -eq 0 -o $YY -eq 00 ]   #do not change -eq 0 and -eq 00
             then
                DD=29
             else
                DD=28
             fi
             ;;
         04|06|09|11) DD=30
             ;;
         01|03|05|07|08|10|12) DD=31
             ;;
         *) echo "ERROR!!! Invalid month $MM" >> $logfnm
            exit
            ;;
      esac
   fi
fi

Datetime=${YY}${MM}${DD}_${hour}00
echo "$Datetime is the file that will be processed." >> $logfnm

#** Copy file to data dir
if [ -f "$INFILE/$Datetime" ]; then
   cp $INFILE/$Datetime $TEMPDIR/.
   fstat=$?
   if test $fstat -ne 0; then
      echo "ERROR: copy $TEMPDIR/$Datetime failed." >> $logfnm
   else
      echo "copy $TEMPDIR/$Datetime succeeded." >> $logfnm
      chmod 755 $TEMPDIR/2*
   fi
else
   echo "WARNING: CAN NOT FIND FILE $INFILE/$Datetime !!!" >> $logfnm
   Endtime=`date`
   echo "-------- Run_lightning_proc ended at: $Endtime --------" >> $logfnm
   echo " " >> $logfnm
   exit
fi

#*** Run lightning_proc program to process data
for fname in `ls -1F $TEMPDIR/2*`
do
   echo Processing $fname >> $logfnm
   runso rary.ohd.pproc lightning_proc_main $fname >> $logfnm 2>>$logfnm
   rm -f $fname
done

Endtime=`date`
echo "-------- Run_lightning_proc ended at: $Endtime --------" >> $logfnm
echo " " >> $logfnm
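The hand-rolled hour/day/month/leap-year rollback above predates reliance on GNU date's relative-time support. On a system with GNU coreutils date, the same "one hour ago" file stamp can be computed in a single call; a sketch of that alternative (not how the original script does it):

#!/bin/ksh
# Sketch: previous hour's YYYYMMDD_HH00 stamp via GNU date, replacing the
# manual rollback logic. Assumes GNU date (the -d relative syntax).
TZ=GMT
export TZ
Datetime=`date -d '1 hour ago' +%Y%m%d_%H00`
echo "$Datetime"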
@@ -1,55 +0,0 @@
#!/bin/ksh
# run_mpe_fieldgen

# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
. $RUN_FROM_DIR/../../check_app_context

export SSHP_INVOKE_MAP_PREPROCESS=$(get_apps_defaults sshp_invoke_map_preprocess)
export SSHP_LOG_DIR=$(get_apps_defaults sshp_log_dir)
export MPE_GAGE_QC=$(get_apps_defaults mpe_gage_qc)
export MPE_LOG_DIR=$(get_apps_defaults rfcwide_logs_dir)
export PPROC_LOCAL_BIN=$(get_apps_defaults pproc_local_bin)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export WHFS_BIN_DIR=$(get_apps_defaults whfs_bin_dir)
export FXA_LOCAL_SITE=$(get_apps_defaults fxa_local_site)

# Create error log file name
logname=`date -u +error.%Y%m%d%H%M%S`
errorlog=$MPE_LOG_DIR/$logname


#
# run build_hourly script which generates hourly
# PP reports based on sub-hourly PP reports.
# it defaults to the top of the current hour

$PPROC_LOCAL_BIN/run_build_hourly

#
# Run mpe_fieldgen for the number of hours specified
# Delete error log if empty
#

runso rary.ohd.pproc mpe_fieldgen_main $@ 2>>$errorlog
echo mpe_fieldgen error log located at: $errorlog

if [ "$SSHP_INVOKE_MAP_PREPROCESS" = "ON" ]
then
   $WHFS_BIN_DIR/run_SSHP_MAP_preprocess >$SSHP_LOG_DIR/MAPPreprocessor.stdout.log 2>>$SSHP_LOG_DIR/MAPPreprocessor.stdout.log &

fi

if [[ ! -s $errorlog ]]
then
   rm -f $errorlog
fi

# Run Biasmesgen.
# this should only be run at WFOs
$PPROC_BIN/run_biasmesgen

exit 0
@@ -1,110 +0,0 @@
#!/bin/ksh

# run_mpe_whfs
# run script for set of programs to complete mpe analysis
# 06/06/2006

# This allows you to run this script outside of ./standard/bin
#
RUN_FROM_DIR=`dirname $0`

# Export the value of FXA_HOME here. This allows read_env.sh to be
# sourced before set_hydro_env. This prevents set_hydro_env environment
# and token settings from being overwritten by read_env.sh.
export FXA_HOME=/awips/fxa

# Set up the AWIPS environment.
#. $FXA_HOME/readenv.sh

# Set up some environment variables for WHFS applications
#
. $RUN_FROM_DIR/../../set_hydro_env
export SSHP_INVOKE_MAP_PREPROCESS=$(get_apps_defaults sshp_invoke_map_preprocess)
export SSHP_LOG_DIR=$(get_apps_defaults sshp_log_dir)
export MPE_GAGE_QC=$(get_apps_defaults mpe_gage_qc)
export MPE_LOG_DIR=$(get_apps_defaults rfcwide_logs_dir)
export PPROC_LOCAL_BIN=$(get_apps_defaults pproc_local_bin)
export PPROC_LOG=$(get_apps_defaults pproc_log)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export WHFS_BIN_DIR=$(get_apps_defaults whfs_bin_dir)

# Setup log output file name
#
fnm=$PPROC_LOG/misc/run_mpe_whfs.log
tmp=$PPROC_LOG/misc/run_mpe_whfs.tmp

# Save only the latest events in the log file
#
tail -1200 $fnm > $tmp
mv $tmp $fnm
chmod 777 $fnm

# Run the series of programs
#
echo "---------------------------------------------" >> $fnm

# build hourly precip reports from sub-hourly data.
#
Dte=`date -u`
echo "Initiating build_hourly job at: "$Dte >> $fnm
$PPROC_LOCAL_BIN/run_build_hourly

# Run lightning data retrieval process if token set
#
if [ $MPE_GAGE_QC = "ON" ]
then
   Dte=`date -u`
   echo "Initiating lightning proc job at: "$Dte >> $fnm
   $PPROC_BIN/run_lightning_proc
else
   echo "Not running lightning proc job since mpe_gage_qc is OFF" >> $fnm
fi

# run mpe_fieldgen
# Create fieldgen error log file name; later, delete error log if empty

Dte=`date -u`
echo "Initiating mpe_fieldgen at: "$Dte >> $fnm

logname=`date -u +error.%Y%m%d%H%M%S`
errorlog=$MPE_LOG_DIR/$logname

NUM_HOURS=$1
echo $errorlog
runso rary.ohd.pproc mpe_fieldgen_main $NUM_HOURS 1>>$fnm 2>>$errorlog

if [[ ! -s $errorlog ]]
then
   rm -f $errorlog
fi

# Run the SiteSpecific MAP preprocessor if token set, to get basin MAP estimates
#
if [ "$SSHP_INVOKE_MAP_PREPROCESS" = "ON" ]
then
   echo "Initiating run_SSHP_MAP_preprocess at: "$Dte >> $fnm
   $WHFS_BIN_DIR/run_SSHP_MAP_preprocess \
    >$SSHP_LOG_DIR/MAPPreprocessor.stdout.log 2>>$SSHP_LOG_DIR/MAPPreprocessor.stdout.log &
fi

# Run Biasmesgen.
# this should only be run at WFOs

Dte=`date -u`
echo "Initiating run_biasmesgen at: "$Dte >> $fnm

$PPROC_BIN/run_biasmesgen

#
# initiate the script to transmit the best estimate QPE
# from the RFCs to the WFOs. This should not be
# enabled at the WFOs.

#Dte=`date -u`
#echo "Initiating transmit_rfc_qpe at: "$Dte >> $fnm
#$PPROC_LOCAL_BIN/transmit_rfc_qpe

# wrap it up
#
Dte=`date -u`
echo "Completed run_mpe_whfs job at: "$Dte >> $fnm
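The `tail -1200` trim used here and in several sibling scripts is a simple self-rotation: keep the newest N lines of a log, discard the rest. A standalone sketch of the pattern (file name illustrative):

#!/bin/ksh
# Sketch: cap a growing log at its most recent 1200 lines.
# Writing to a temp file first, then renaming, avoids truncating
# the log while it is being read.
logfile=/tmp/example.log        # hypothetical path
tmpfile=$logfile.tmp
tail -1200 $logfile > $tmpfile && mv $tmpfile $logfile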
@@ -1,31 +0,0 @@
#!/bin/ksh
# run_nc2grib

# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
#export NC2GRIB_LOG_DIR=$(get_apps_defaults pproc_util_log_dir)
export PPROC_LOCAL_BIN=$(get_apps_defaults pproc_local_bin)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export WHFS_BIN_DIR=$(get_apps_defaults whfs_bin_dir)

# Create error log file name
# (note: NC2GRIB_LOG_DIR is referenced below, but its export above is
# commented out, so it must already be set in the calling environment)
logname=NC2GRIB_`date -u +error.%Y%m%d%H%M%S`
errorlog=$NC2GRIB_LOG_DIR/$logname

#
# Run nc2grib with parameters specified
# Delete error log if empty
#

runso rary.ohd.pproc nc2grib_main $@ 2>>$errorlog
echo nc2grib error log located at: $errorlog
if [[ ! -s $errorlog ]]
then
   rm -f $errorlog
fi

exit $?   # note: this reflects the preceding test/rm, not the nc2grib run
#exit 0
@@ -1,55 +0,0 @@
#!/bin/ksh
# run_post_analysis
# October 18, 2005

# This allows you to call this script from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for WHFS applications
. $RUN_FROM_DIR/../../set_hydro_env
export MPE_LOG_DIR=$(get_apps_defaults rfcwide_logs_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export DB_NAME=$(get_apps_defaults db_name)

#set the resource file searching directory
XAPPLRESDIR=$RUN_FROM_DIR
export XAPPLRESDIR

#----------------------------------------------------------------------
# Setup log file name
#
LOGFILE=$MPE_LOG_DIR/post_analysis.log
TEMPFILE=$MPE_LOG_DIR/post_analysis.tmp
#
HOSTNAME=`hostname`
#
# save only the latest events in the log file
#
if [ -f $LOGFILE ]
then
   {
   tail -1200 $LOGFILE > $TEMPFILE
   mv $TEMPFILE $LOGFILE
   }
else
   echo "--------------------------" > $LOGFILE
fi

# log start of script
#
echo "------------------------------ " >> $LOGFILE
Dte=`date -u`
echo Invoking $PPROC_BIN/post_analysis$OS_SUFFIX at $Dte >> $LOGFILE
echo Current system: $HOSTNAME >> $LOGFILE
echo Current database: $DB_NAME >> $LOGFILE

#
# program usage:
# post_analysis [-ddebug] [-t<specified date time in YYYYMMDDHH>] [--optional args]
#
# Run post_analysis from the appropriate directory
#
$PPROC_BIN/post_analysis$OS_SUFFIX >> $LOGFILE 2>&1

Dte=`date -u `
echo Completed post_analysis at $Dte >> $LOGFILE
@@ -1,165 +0,0 @@
#!/bin/ksh
# ==============================================================
#  start_gage_pp - script to start up the Gage Precip Processor
#
#  This script will return one of the following completion codes
#  to the operating system indicating success or failure of its attempt
#  to start the Gage Precip Processor program:
#
#   0  The attempt to start Gage Precip Processor was successful.
#   1  An invalid argument was passed to this script.
#   2  The gage_pp_userid token is not defined.
#   3  The user does not have permission to start Gage Precip Processor.
#   4  The directory specified for data files does not exist.
#   5  The directory specified for the log files does not exist.
#   6  The directory specified for the error files does not exist.
#   7  The directory specified for the executables does not exist.
#   8  The Gage Precip Processor is already running.
#   9  The Gage Precip Processor is not allowed to run on this system.
#      Check the gage_pp_host token for the valid system name.
#  10  The gage_pp_enable token is off. There will be no data for gage_pp
#      to process. Gage_pp will not be started.
#
#  Revision History
#  Moria Shebsovich    July 2, 2004    Created.
#
# =================================================================

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`
HOSTNAME=`hostname`

#
# set up SOME environment variables for AWIPS Team applications
. $RUN_FROM_DIR/../../set_hydro_env
export GAGE_PP_USERID=$(get_apps_defaults gage_pp_userid)
export GAGE_PP_HOST=$(get_apps_defaults gage_pp_host)
export GAGE_PP_ENABLE=$(get_apps_defaults gage_pp_enable)
export GAGE_PP_DATA=$(get_apps_defaults gage_pp_data)
export GAGE_PP_LOG=$(get_apps_defaults gage_pp_log)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

GAGE_PP_LOGDIR=$GAGE_PP_LOG

if [[ ! -d $GAGE_PP_LOGDIR ]]
then
   print "start_gage_pp: Directory specified for log files does not " \
         "exist " $GAGE_PP_LOGDIR
   exit 5
fi

#
# define the gage_pp daily log file name
LOGFILE=`date +gage_pp_log_%m%d`
GAGE_PP_LOGFILE=$GAGE_PP_LOGDIR/$LOGFILE

# Check to make sure that the gage_pp_enable token is set to ON.
if [[ $GAGE_PP_ENABLE != "ON" ]]
then

   print "gage_pp_enable token must be set to 'ON'. Gage Precip Processor" \
         "not started." | tee -a $GAGE_PP_LOGFILE
   exit 10
fi

# Check which system the Gage Precip Processor is allowed to run on.
# This is specified by the gage_pp_host token which is represented
# by the GAGE_PP_HOST variable set in set_hydro_env.
# If this script is not being run on that system, then log an error message.

COMPARISON_STRING=`echo $HOSTNAME $GAGE_PP_HOST | awk -F' ' 'match ($1, $2)'`

if test -z "$COMPARISON_STRING"
then

   print "Gage Precip Processor cannot be started on '$HOSTNAME'" \
         "It can only be started from '$GAGE_PP_HOST'." >> $GAGE_PP_LOGFILE
   exit 9
fi

# First check to see if the user trying to start process is the one
# allowed to do it
if [[ $GAGE_PP_USERID = "" ]]
then
   print start_gage_pp: User allowed to start Gage Precip Processor \
         has not been assigned. >> $GAGE_PP_LOGFILE
   print " "Please assign a user id to the 'gage_pp_userid' >> $GAGE_PP_LOGFILE
   print " "token for the get_apps_defaults process. >> $GAGE_PP_LOGFILE
   exit 2
fi

if [ $LOGNAME != $GAGE_PP_USERID ]
then
   print start_gage_pp: User $LOGNAME is not allowed to start \
         Gage Precip Processor. >> $GAGE_PP_LOGFILE
   print " "Only user $GAGE_PP_USERID can start Gage Precip Processor! >> $GAGE_PP_LOGFILE
   exit 3
fi

#
# get the directory specifications

GAGEPPDATA=$GAGE_PP_DATA
GAGEPPBIN=$PPROC_BIN

#
# Check to see if Gage Precip Processor data, log, and error directories exist

if [[ ! -d $GAGEPPDATA ]]
then
   print "start_gage_pp: Directory specified for data files does not " \
         "exist " $GAGEPPDATA >> $GAGE_PP_LOGFILE
   exit 4
fi


if [[ ! -d $GAGEPPBIN ]]
then
   print "start_gage_pp: Directory specified for executables does not " \
         "exist " $GAGEPPBIN >> $GAGE_PP_LOGFILE
   exit 7
fi

# Check if there is an instance of the Gage Precip Processor already running
# for this input data directory, and on this machine
# first, get the pid of the most recent run of the gage_pp
# for this input data directory

export DUPSTART_LOGFILE=$GAGE_PP_LOGDIR/duplicate_start.log

export PIDFILE=$GAGEPPDATA/gage_pp_pid.dat
export FILEPID=`cat $PIDFILE`

if [[ $FILEPID != "" ]]
then
   if ps -p $FILEPID | grep gage_pp > /dev/null 2>&1
   then
      print "***********************************************" >> $DUPSTART_LOGFILE
      print "Gage Precip Processor ALREADY running (pid $FILEPID)." >> $DUPSTART_LOGFILE
      print "Additional run prevented. $(date -u) " >> $DUPSTART_LOGFILE
      print " " >> $DUPSTART_LOGFILE

      exit 8
   fi
fi


# Announce startup of gage_pp

print ===================================================================== >> $GAGE_PP_LOGFILE
print Starting gage_pp$OS_SUFFIX as $LOGNAME at $(date -u +"%T %Z") on \
      $(date -u +"%a %b %d %Y") on $HOSTNAME >> $GAGE_PP_LOGFILE
print ===================================================================== >> $GAGE_PP_LOGFILE
print " " >> $GAGE_PP_LOGFILE


# Remove stop flag file if it exists

rm -f $GAGEPPDATA/stop_gage_pp

# Start program in background
# Redirect standard input and output to /dev/null so that
# gage_pp.OS does not hang up when called from ssh.
$GAGEPPBIN/gage_pp$OS_SUFFIX < /dev/null > /dev/null 2>&1 &

exit 0
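The duplicate-instance guard above combines a PID file with a `ps` lookup, so a stale PID that the kernel has since reused for an unrelated process does not block startup. A condensed sketch of the pattern (path and process name illustrative):

#!/bin/ksh
# Sketch: refuse to start a second instance. The grep on the process name
# guards against the recorded PID having been recycled by another program.
pidfile=/tmp/mydaemon.pid        # hypothetical PID file
if [[ -s $pidfile ]] && ps -p $(cat $pidfile) | grep mydaemon > /dev/null 2>&1
then
   print "mydaemon already running (pid $(cat $pidfile))."
   exit 8
fi
mydaemon &                       # hypothetical daemon program
print $! > $pidfile              # record the new instance's PID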
@@ -1,241 +0,0 @@
#!/bin/ksh
#
# This script is normally started by or stopped from the DHRgather script
# which runs via cron at set user time to check what precipitation category
# all the radars in a WFO or RFC AOR are in.
# If any are in precip mode, the decoders and HPE field generator below
# will start if they're not running.
#
# David T. Miller RSIS OHD/HSEB
# October 30, 2007

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env
export DSP_PROD_DIR=$(get_apps_defaults dsp_prod_dir)
export DHR_PROD_DIR=$(get_apps_defaults dhr_prod_dir)
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export DHR_LOG_DIR=$(get_apps_defaults dhr_log_dir)

test -e $HPE_LOG_DIR/start_hpe_ctl
FOUND=$?
if test $FOUND -eq 1
then
   touch $HPE_LOG_DIR/start_hpe_ctl
fi

#
# define functions for checking for a stop file and
# logging a message. note that for the
# shell function "test", a true condition returns 0.

checkHPEstop()
{
   STOP_DIR=$1
   test -r $STOP_DIR/stop_hpe_process
   FILES_FOUND=$?
   if test $FILES_FOUND -eq 0
   then

      # delay stopping just to make sure that there really isn't any precip
      # for all radars within the CWFA
      # check the time for the file against the current time
      # if greater than 20 minutes then stop HPE

      tnow=`date +%s`
      tfile=`date +%s -r $STOP_DIR/stop_hpe_process`
      agesecs=$(($tnow-$tfile))
      if [[ $agesecs -ge 1200 ]]
      then
         return 1
      else
         echo "stop_hpe_process file found. Will stop HPE after 1200 sec (20min) " >> $hpefnm
         echo "unless new precip occurs within that time. Time elapsed = $agesecs seconds " \
              >> $STOP_DIR/processhpe_log
         return 2
      fi

   else
      return 0
   fi
}

checkDataRun()
{
   # Check to see if age on file is more than 20 minutes
   tnow=`date +%s`
   tfile=`date +%s -r $DHR_LOG_DIR/dhrgather_control`   #$HPE_LOG_DIR/start_hpe_log
   agesecs=$(($tnow-$tfile))
   if [[ $agesecs -ge 1200 ]]
   then
      return 1
   else
      return 2
   fi
}

logHPEstop()
{
   LOG_FILE=$1

   DTZ=`date -u`
   echo "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" >> $LOG_FILE
   echo "Terminating HPE processes." >> $LOG_FILE
   echo "Stopfile found at:" $DTZ >> $LOG_FILE
   echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" >> $LOG_FILE

   return 0
}




hpefnm=$HPE_LOG_DIR/processhpe_log
hpetmp=$HPE_LOG_DIR/processhpe_tmp

# Remove the stop file because either at least one radar has
# indicated there's precip or this script was started manually.
# Script will check if one has been entered after it starts.

if [[ -r $HPE_LOG_DIR/stop_hpe_process ]]
then
   rm -f $HPE_LOG_DIR/stop_hpe_process
fi

# get the time lag variable to check against data files
#

TIMELAG=`get_apps_defaults hpe_timelag`

#
# enter an infinite loop. for each pass thru main loop,
# run the DHR and DSP decoders every minute. Then check to
# see when the HPE field generator should be run according to
# the users frequency token. Will do this until there's no more
# precip in the WFO or RFC AOR and this script is stopped.
#

Dte=`date -u`
echo "" >> $hpefnm
echo "HPE processing starting up " $Dte >> $hpefnm
echo "DHR and DSP decoders running every minute" >> $hpefnm

let "loop_cnt=0"

HPE_RUNFREQ=`get_apps_defaults hpe_runfreq`
echo "HPE Run Frequency $HPE_RUNFREQ minutes " >> $hpefnm

while true
do

   #
   # save log latest entries
   #

   if [[ -s $hpefnm ]]
   then
      tail -1200 $hpefnm > $hpetmp
      mv $hpetmp $hpefnm
   fi

   checkHPEstop $HPE_LOG_DIR
   STOP_FOUND=$?

   if test $STOP_FOUND -eq 1
   then

      echo "Elapsed time reached, stopping..." >> $hpefnm


      # if there's no precip in area, remove the temp files
      #
      if [[ -d $DHR_PROD_DIR && -n "$DHR_PROD_DIR" ]]
      then
         echo "Removing temp DHR files in $DHR_PROD_DIR due to no rain " $Dte \
              >> $hpefnm
         rm -f $DHR_PROD_DIR/*
      else
         echo "Attempted to remove files but " >> $hpefnm
         echo "DHR PROD DIR $DHR_PROD_DIR invalid " $Dte >> $hpefnm
      fi

      if [[ -d $DSP_PROD_DIR && -n "$DSP_PROD_DIR" ]]
      then
         echo "Removing temp DSP files in $DSP_PROD_DIR due to no rain " $Dte >> $hpefnm
         rm -f $DSP_PROD_DIR/*
      else
         echo "Attempted to remove files but " >> $hpefnm
         echo "DSP PROD DIR $DSP_PROD_DIR invalid " $Dte >> $hpefnm
      fi

      logHPEstop $hpefnm
      exit 0
   elif test $STOP_FOUND -eq 2
   then
      echo "Stop condition not met, continuing..." >> $hpefnm
   fi

   checkDataRun $HPE_LOG_DIR
   DATA_RUN=$?

   if test $DATA_RUN -eq 1
   then
      echo "No data received in 20 minutes, exiting. " `date -u` >> $hpefnm
      exit 0
   fi

   $PPROC_BIN/Run_DecodeDHR
   DHRDECODE_RUN=$?
   if test $DHRDECODE_RUN -eq 0
   then
      echo "Finished running dhr decoders " `date -u` >> $hpefnm
   fi
   $PPROC_BIN/Run_DecodeDSP
   DSPDECODE_RUN=$?
   if test $DSPDECODE_RUN -eq 0
   then
      echo "Finished running dsp decoders " `date -u` >> $hpefnm
   fi

   sleep 60

   # Check to see if age on file is more than $HPE_RUNFREQ minutes
   runfreq=$(($HPE_RUNFREQ * 60))
   tnow=`date +%s`
   tfile=`date +%s -r $HPE_LOG_DIR/start_hpe_ctl`
   agesecs=$(($tnow-$tfile))
   if [[ $agesecs -lt $runfreq ]]
   then
      let "loop_cnt=1"
   else
      let "loop_cnt=2"
   fi

   # checking every run allows the user to change this run frequency

   if [[ $loop_cnt -eq 2 ]]
   then
      #
      # get the time lag variable again in case it has changed
      #

      touch $HPE_LOG_DIR/start_hpe_ctl
      echo "touching start_hpe_ctl file before starting hpe_fieldgen " $Dte >> $hpefnm
      Dte=`date -u`
      TIMELAG=`get_apps_defaults hpe_timelag`
      echo "starting HPE field generator with time lag of $TIMELAG minutes " $Dte >> $hpefnm
      $PPROC_BIN/run_hpe_fieldgen $TIMELAG

      # checking every run allows the user to change this run frequency
      HPE_RUNFREQ2=`get_apps_defaults hpe_runfreq`

      if [[ $HPE_RUNFREQ -ne $HPE_RUNFREQ2 ]]
      then
         HPE_RUNFREQ=$HPE_RUNFREQ2
         echo "HPE run freq changed to $HPE_RUNFREQ minutes " >> $hpefnm
      fi
   fi
done
exit 0
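The stop-file and data-age tests above both rest on the same idiom: `date +%s -r FILE` (GNU date) returns a file's modification time in epoch seconds, so subtracting it from `date +%s` gives the file's age. A minimal sketch (threshold and path illustrative):

#!/bin/ksh
# Sketch: act only when a marker file is older than 20 minutes.
# Assumes GNU date, which supports -r FILE for "mtime of FILE".
marker=/tmp/stop_marker          # hypothetical marker file
tnow=`date +%s`
tfile=`date +%s -r $marker`
agesecs=$(($tnow - $tfile))
if [[ $agesecs -ge 1200 ]]
then
   echo "marker is $agesecs seconds old; proceeding with shutdown"
fi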
@@ -1,72 +0,0 @@
#!/bin/ksh

#
# script for allowing the restart of the HPE crons
#
# HPE crons to gather radar information run every minute
# This script just removes the stop file if present which the crons
# are looking for so that they can start again via the cron.
#
# David T. Miller RSIS OHD/HSEB
# October 31, 2007

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)

# this script uses the following set_hydro_env variables:
# HPE_LOG_DIR
#

# GAD_DIR is the directory which holds the gad program (get_apps_defaults.LX)
GAD_DIR=/awips/hydroapps/public/bin

# setup log files
#

if [[ ! -d $HPE_LOG_DIR ]]
then
   print "Directory specified for placing stop_hpe_crons file does not exist " $HPE_LOG_DIR
   echo "Directory specified for placing stop_hpe_crons file does not exist " $HPE_LOG_DIR

   exit -1
fi

hpefnm=$HPE_LOG_DIR/start_hpe_log
hpetmp=$HPE_LOG_DIR/start_hpe_tmp


#
# save latest entries in log file
#


if [[ -s $hpefnm ]]
then
   tail -1200 $hpefnm > $hpetmp
   mv $hpetmp $hpefnm
fi

echo "Attempting to remove stop_hpe_crons file... " >> $hpefnm

if [[ -r $HPE_LOG_DIR/stop_hpe_crons ]]
then
   rm -f $HPE_LOG_DIR/stop_hpe_crons
   echo "stop_hpe_crons file has been removed." >> $hpefnm
   echo "HPE radar data processes should now restart via cron " >> $hpefnm

   # so user will see this on the screen as well since this is a manual process

   echo "stop_hpe_crons file has been removed."
   echo "HPE radar data processes should now restart via cron "

else
   echo "stop_hpe_crons file does not exist. " >> $hpefnm
   echo "stop_hpe_crons file does not exist. "

fi

exit 0
@@ -1,28 +0,0 @@
#!/bin/ksh
# start_hmap_mpe

# This allows this script to be run from outside of ./standard/bin
RUN_FROM_DIR=`dirname $0`

# Export the value of FXA_HOME here. This allows read_env.sh to be
# sourced before set_hydro_env. This prevents set_hydro_env environment
# and token settings from being overwritten by read_env.sh.
export FXA_HOME=/awips/fxa

# Set up the AWIPS environment.
. $FXA_HOME/readenv.sh

# Set up SOME environmental variables required by hmap_mpe.
. $RUN_FROM_DIR/../../set_hydro_env
export WHFS_BIN_DIR=$(get_apps_defaults whfs_bin_dir)
export DB_NAME=$(get_apps_defaults db_name)

XAPPLRESDIR=$RUN_FROM_DIR
export XAPPLRESDIR

# for calling Java (for the ColorManager in this case) from within a C program
# already added LD_LIBRARY_PATH to set_hydro_env
export CLASSPATH=$DB_DRIVER_PATH:$WHFS_BIN_DIR/timeserieslite.jar:$WHFS_BIN_DIR/MiscDialogs.jar

xterm -T mpe_editor$OS_SUFFIX -iconic -n mpe_editor \
      -e $RUN_FROM_DIR/mpe_editor$OS_SUFFIX -d$DB_NAME &
@@ -1,101 +0,0 @@
#!/bin/ksh
# =========================================================================
#  start_process_dpafiles - script to start up the process_dpafiles process
# =========================================================================

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS Team applications
. $RUN_FROM_DIR/../../set_hydro_env
export DPA_PROD_DIR=$(get_apps_defaults dpa_prod_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)
export SHEFDECODE_USERID=$(get_apps_defaults shefdecode_userid)
export DPA_ERROR_DIR=$(get_apps_defaults dpa_error_dir)
export DPA_LOG_DIR=$(get_apps_defaults dpa_log_dir)
export WHFS_BIN_DIR=$(get_apps_defaults whfs_bin_dir)

# First check to see if the user trying to start process is
# allowed to do it

if [[ $SHEFDECODE_USERID = "" ]]
then
   print The user allowed to start the decoders has not been assigned.
   print " "Please assign a user id to the 'shefdecode_userid'
   print " "token for the get_apps_defaults process.
   exit -1
fi

if [ $LOGNAME != $SHEFDECODE_USERID ]
then
   print User $LOGNAME is not allowed to start process_dpafiles.
   print " "Only user $SHEFDECODE_USERID can start this process.
   exit -2
fi


# get the directory specifications

DATA_DIR=$DPA_PROD_DIR
LOG_DIR=$DPA_LOG_DIR
ERR_DIR=$DPA_ERROR_DIR
BIN_DIR1=$PPROC_BIN
BIN_DIR2=$WHFS_BIN_DIR

# Check to see if directories exist

if [[ ! -d $DATA_DIR ]]
then
   print "Directory specified for data files does not exist " $DATA_DIR
   exit -3
fi

if [[ ! -d $LOG_DIR ]]
then
   print "Directory specified for log files does not exist " $LOG_DIR
   exit -4
fi

if [[ ! -d $ERR_DIR ]]
then
   print "Directory specified for error files does not exist " $ERR_DIR
   exit -5
fi

if [[ ! -d $BIN_DIR1 ]]
then
   print "Directory specified for executables does not exist " $BIN_DIR1
   exit -6
fi

if [[ ! -d $BIN_DIR2 ]]
then
   print "Directory specified for executables does not exist " $BIN_DIR2
   exit -7
fi




# define the log file name

LOG_NAME=`date -u +process_dpa_log_%m%d`
LOG_FILE=$LOG_DIR/$LOG_NAME


# Announce startup of process_dpafiles

print ======================================================== >> $LOG_FILE
print Starting process_dpafiles at $(date -u) >> $LOG_FILE
print ======================================================== >> $LOG_FILE
print " " >> $LOG_FILE


# Remove stop flag file if it exists

rm -f $DATA_DIR/stop_dpadecode


# Start script in background
# note that the script requires arguments

$BIN_DIR1/process_dpafiles $DATA_DIR $LOG_DIR $ERR_DIR $BIN_DIR1 $BIN_DIR2 >> $LOG_FILE &
@@ -1,110 +0,0 @@
#!/bin/ksh
# ============================================================
#  stop_gage_pp - script to stop the gage_pp process
#
#  This script will return one of the following completion codes
#  to the operating system indicating success or failure of its
#  attempt to stop the Gage Precip Processor.
#
#  0  The attempt to stop the Gage Precip Processor was successful.
#  1  An invalid command line argument has been passed into this script.
#  2  The gage_pp_userid token is not defined.
#  3  The user is not allowed to stop the Gage Precip Processor.
#  4  The directory specified for the data files does not exist.
#  5  The Gage Precip Processor is not running. There is no process to stop.
#  6  The Gage Precip Processor log directory has not been defined.
#  7  The Gage Precip Processor cannot be stopped from this system.
#     Check the gage_pp_host token for the valid system name.
#
#  Revision History
#  Moria Shebsovich    July 7, 2004    Created.
# ============================================================
#
# This allows you to call this script from outside of ./bin
HOSTNAME=`hostname`
RUN_FROM_DIR=`dirname $0`

#
# set up SOME environment variables for AWIPS Team applications
. $RUN_FROM_DIR/../../set_hydro_env
export GAGE_PP_USERID=$(get_apps_defaults gage_pp_userid)
export GAGE_PP_HOST=$(get_apps_defaults gage_pp_host)
export GAGE_PP_DATA=$(get_apps_defaults gage_pp_data)
export GAGE_PP_LOG=$(get_apps_defaults gage_pp_log)

GAGE_PP_LOG_DIR=$GAGE_PP_LOG
if [[ ! -d $GAGE_PP_LOG_DIR ]]
then
   print "Directory specified for log files does not exist " $GAGE_PP_LOG_DIR
   exit 6
fi

#
# define the Gage Precip Processor daily log file name
LOGFILE=`date +gage_pp_log_%m%d`
GAGE_PP_LOG_FILE=$GAGE_PP_LOG_DIR/$LOGFILE

# Check to see which system the Gage Precip Processor is allowed
# to be stopped from.
# This is specified by the gage_pp_host token which is represented
# by the GAGE_PP_HOST variable set in set_hydro_env.
# If this script is not being run on that system, then log an error message.

COMPARISON_STRING=`echo $HOSTNAME $GAGE_PP_HOST | awk -F' ' 'match ($1, $2)'`
if test -z "$COMPARISON_STRING"
then

   print "Gage Precip Processor cannot be stopped from '$HOSTNAME'" \
         "It can only be stopped from '$GAGE_PP_HOST'." >> $GAGE_PP_LOG_FILE
   exit 7

fi

#
# Check to see if the user trying to stop process is the one
# allowed to do it

if [[ $GAGE_PP_USERID = "" ]]
then
   print stop_gage_pp: User allowed to stop gage_pp has not \
         been assigned. >> $GAGE_PP_LOG_FILE
   print " "Please assign a user id to the \
         'gage_pp_userid' >> $GAGE_PP_LOG_FILE
   print " "token for the get_apps_defaults process. >> $GAGE_PP_LOG_FILE
   exit 2
fi

if [ $LOGNAME != $GAGE_PP_USERID ]
then
   print stop_gage_pp: User $LOGNAME is not allowed to stop \
         Gage Precip Processor. >> $GAGE_PP_LOG_FILE
   print " "Only user $GAGE_PP_USERID can stop \
         Gage Precip Processor! >> $GAGE_PP_LOG_FILE
   exit 3
fi

# Check to determine if the Gage Precip Processor is running before
# an attempt is made to stop it.

export PIDFILE=$GAGE_PP_DATA/gage_pp_pid.dat
export FILEPID=`cat $PIDFILE`

if [[ $FILEPID != "" ]]
then
   ps -p $FILEPID | grep gage_pp > /dev/null 2>&1
   if [ $? -eq 1 ]
   then
      print "stop_gage_pp: GagePP is not running." >> $GAGE_PP_LOG_FILE
      exit 5
   fi

fi

#
# touch the stop file, which the gage_pp program checks for
# and if it exists, then it initiates an orderly abort of the system

touch $GAGE_PP_DATA/stop_gage_pp


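start_gage_pp and stop_gage_pp cooperate through a stop flag file rather than signals: the stopper touches the file and the daemon polls for it, exiting cleanly when it appears. A sketch of the daemon side of that handshake (names illustrative; the real gage_pp does this inside its C code):

#!/bin/ksh
# Sketch: daemon-side polling loop for a stop flag file.
# The stop script's whole job is: touch /tmp/stop_mydaemon
stopfile=/tmp/stop_mydaemon      # hypothetical flag file
while true
do
   if [[ -a $stopfile ]]
   then
      rm -f $stopfile            # consume the flag
      exit 0                     # orderly shutdown point
   fi
   # ... do one unit of work here ...
   sleep 60
done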
@@ -1,77 +0,0 @@
#!/bin/ksh

#
# script for stopping the HPE processes
#
# This introduces a delayed stop of 20 min to the hpe processes.
# However, note that the DHRgather script executed via cron should
# be stopped first. Otherwise, at the next precipitation report
# the HPE processes will be started again.
#
# David T. Miller RSIS OHD/HSEB
# October 30, 2007

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)




# this script uses the following set_hydro_env variables:
# HPE_LOG_DIR
#

# GAD_DIR is the directory which holds the gad program (get_apps_defaults.LX)
GAD_DIR=/awips/hydroapps/public/bin

# setup log files
#

if [[ ! -d $HPE_LOG_DIR ]]
then
   print "Directory specified for placing stop file does not exist " $HPE_LOG_DIR
   exit -1
fi

hpefnm=$HPE_LOG_DIR/start_hpe_log
hpetmp=$HPE_LOG_DIR/start_hpe_tmp


#
# save latest entries in log file
#


if [[ -s $hpefnm ]]
then
   tail -1200 $hpefnm > $hpetmp
   mv $hpetmp $hpefnm
fi

echo "Attempting to stop start_hpe... " >> $hpefnm

phpe=`ps -ef|grep start_hpe|grep -v grep|awk '{print $2}'`




if [[ -z "$phpe" ]]
then
   echo "start_hpe not running, stop not required " >> $hpefnm
else
   if [[ ! -r $HPE_LOG_DIR/stop_hpe_process ]]
   then
      touch $HPE_LOG_DIR/stop_hpe_process
      echo "stop_hpe_process file has been created." >> $hpefnm
      echo "HPE process will stop after 20 min unless new precip is detected. " >> $hpefnm
      echo "Use kill -9 (pid of start_hpe) if immediate shutdown is needed. " >> $hpefnm
   else
      echo "stop_hpe_process file exists already. " >> $hpefnm
      echo "HPE processes will end in less than 20 min unless new precip is detected. " >> $hpefnm
      echo "Use kill -9 (pid of start_hpe) if immediate shutdown is needed. " >> $hpefnm
   fi
fi

exit 0
@@ -1,74 +0,0 @@
#!/bin/ksh

#
# script for bypassing the HPE radar gather crons
#
# HPE crons to gather radar information run every minute
# This script just creates a file the crons are looking for
# and if found, it will exit the gather script before beginning.
#
# David T. Miller RSIS OHD/HSEB
# October 30, 2007

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS applications
. $RUN_FROM_DIR/../../set_hydro_env
export HPE_LOG_DIR=$(get_apps_defaults hpe_log_dir)




# this script uses the following set_hydro_env variables:
# HPE_LOG_DIR
#

# GAD_DIR is the directory which holds the gad program (get_apps_defaults.LX)
GAD_DIR=/awips/hydroapps/public/bin

# setup log files
#

if [[ ! -d $HPE_LOG_DIR ]]
then
   print "Directory specified for placing stop_hpe_crons file does not exist " $HPE_LOG_DIR
   exit -1
fi

hpefnm=$HPE_LOG_DIR/start_hpe_log
hpetmp=$HPE_LOG_DIR/start_hpe_tmp


#
# save latest entries in log file
#


if [[ -s $hpefnm ]]
then
   tail -1200 $hpefnm > $hpetmp
   mv $hpetmp $hpefnm
fi

echo "Attempting to create stop_hpe_crons file... " >> $hpefnm

if [[ ! -r $HPE_LOG_DIR/stop_hpe_crons ]]
then
   touch $HPE_LOG_DIR/stop_hpe_crons
   echo "stop_hpe_crons file has been created." >> $hpefnm
   echo "HPE radar data processes will not be executed via cron until this file is " >> $hpefnm
   echo "removed by the start_hpe_crons script. " >> $hpefnm

   # so user sees it on the screen as well since this is a manual process

   echo "stop_hpe_crons file has been created."
   echo "HPE radar data processes will not be executed via cron until this file is "
   echo "removed by the start_hpe_crons script. "

else
   echo "stop_hpe_crons file exists already. " >> $hpefnm
   echo "stop_hpe_crons file exists already. "

fi

exit 0
@@ -1,44 +0,0 @@
#!/bin/ksh
# ====================================================================
#  stop_process_dpafiles - script to stop the process_dpafiles process
# ====================================================================

# This allows you to call this script from outside of ./bin
RUN_FROM_DIR=`dirname $0`

# set up SOME environment variables for AWIPS Team applications
. $RUN_FROM_DIR/../../set_hydro_env
export DPA_PROD_DIR=$(get_apps_defaults dpa_prod_dir)
export SHEFDECODE_USERID=$(get_apps_defaults shefdecode_userid)

# First check to see if the user trying to stop process is the one
# allowed to do it. note that it uses the same token as shefdecode
# Error if user id is not the one to do it or no allowed id has been
# assigned

if [[ $SHEFDECODE_USERID = "" ]]
then
   print The user allowed to stop the decoders has not been assigned.
   print " "Please assign a user id to the 'shefdecode_userid'
   print " "token for the get_apps_defaults process.
   exit -1
fi

if [ $LOGNAME != $SHEFDECODE_USERID ]
then
   print User $LOGNAME is not allowed to stop process_dpafiles.
   print " "Only user $SHEFDECODE_USERID can issue this command.
   exit -2
fi


# touch the stop file, which the process script checks for
# and if it exists, then it initiates an orderly abort of the process.

if [[ ! -d $DPA_PROD_DIR ]]
then
   print "Directory specified for placing stop file does not exist " $DPA_PROD_DIR
   exit -3
fi

touch $DPA_PROD_DIR/stop_dpadecode
@@ -1,29 +0,0 @@
-- gather distinct lid,ts combinations from temperature table where extremum='X'

select lid, ts
into temp temptable
from temperature
where extremum='X'
group by 1,2
order by lid
;

-- add info from location table for the lid

select
   l.lid,
   ' TAI'||t.ts||'XZ',
   to_char(l.lat,'99.99'),
   to_char(l.lon,'999.99'),
   case
      when l.elev >= 0 then to_char(l.elev,'99999')
      else ' -999'
   end,
   '9',
   case
      when l.name ISNULL then 'XXXXXXXXXX'
      else l.name
   end
from temptable t, location l
where t.lid=l.lid
order by l.lid asc;
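This query pair builds a fixed-width station list (lid, a SHEF-style TAI..XZ type code, lat/lon, elevation, and name) by way of a temporary table. The script itself does not say how it is invoked; a hedged usage sketch, assuming a PostgreSQL IHFS database (the ISNULL and to_char usage is PostgreSQL-compatible) and with the database and file names purely hypothetical:

#!/bin/ksh
# Sketch: run the station-list query against an IHFS database and capture
# the formatted rows. hd_ob83oax and temperature_stations.sql are
# hypothetical names for illustration only.
psql -d hd_ob83oax -t -f temperature_stations.sql > temperature_stations.out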
@@ -1,133 +0,0 @@
#!/bin/ksh

###############################################################################
# transmit_rfc_bias
#
# This script creates an rfc bias message and transmits it across the WAN.
#
# The office must set the PRODUCT_ID and RECIPIENTS variables at the
# beginning of this script. The PRODUCT_ID is the 10 character WMO id -
# CCCCNNNXXX. The product category (NNN) should be left as "RRM", indicating
# miscellaneous hydrologic data. For example, for MARFC the WMO id could
# be set as "KRHARRMRHA".
#
# The RECIPIENTS variable contains a comma-separated list of offices the
# RFC would like to send the bias message to. For example, for MARFC
# this list could be defined as:
# RECIPIENTS="LWX,CTP,PHI,PBZ,BGM,BUF,OKX"
#
# Usage:
#
# transmit_rfc_bias <YYYYMMDDHH>
#    YYYY is the year
#    MM is the month
#    DD is the day
#    HH is the hour
#
# Logs for this script are written to:
# /awips/hydroapps/precip_proc/local/data/log/process_bias_message
#
# Modification History
# March 26, 2007      Bryon Lawrence   Script Written
# September 21, 2007  Bryon Lawrence   Added -e option to distributeProduct call.
#
###############################################################################
export PRODUCT_ID="CCCCRRMXXX"
export RECIPIENTS=""

#
# Export WHFS environmental variables.
#
RUN_FROM_DIR=`dirname $0`

# These lines may be commented out when this script is run from
# mpe_editor, since it then inherits mpe_editor's environment. If this
# script is run stand-alone, they must be active.
export FXA_HOME=/awips/fxa
. $FXA_HOME/readenv.sh
. $RUN_FROM_DIR/../../set_hydro_env
export RFC_BIAS_OUTPUT_DIR=$(get_apps_defaults rfc_bias_output_dir)
export PROCESS_BIAS_LOG_DIR=$(get_apps_defaults process_bias_log_dir)
export MPE_TRANSMIT_BIAS=$(get_apps_defaults mpe_transmit_bias)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

# set the java classpath
export CLASSPATH=$DB_DRIVER_PATH:$PPROC_BIN/bias_trans.jar

#
# Open the log file to track the status of the transmission of the RFC Bias
# Message.
#
transmit_log=`date +$PROCESS_BIAS_LOG_DIR/transmit_rfc_bias_%m%d`

echo "------------------------------------------------" >> $transmit_log

Dte=`date -u`
echo "Starting transmit_rfc_bias at $Dte" >> $transmit_log

#
# Check the mpe_transmit_bias token to make sure it is on.
#
if [[ $MPE_TRANSMIT_BIAS = "OFF" || $MPE_TRANSMIT_BIAS = "off" ]]
then
   echo "Token mpe_transmit_bias is off. RFC Bias Message not generated." >> $transmit_log
   exit 1
fi

if [[ -z "$RECIPIENTS" ]]
then
   echo "No recipients specified in the transmit_rfc_bias script." >> $transmit_log
   exit 1
fi

#
# Create the RFC Bias message.
#
echo "Calling the bias message creation program" >> $transmit_log
echo "${SYS_JAVA_DIR}/bin/java ohd.hseb.bias_trans/BiasMessageCreator $JDBCURL $1" >> $transmit_log
${SYS_JAVA_DIR}/bin/java ohd.hseb.bias_trans/BiasMessageCreator $JDBCURL $1 \
    >> $transmit_log 2>&1

#
# Call distributeProduct and send the RFC Bias Message across the WAN.
#
FILENAME=${FXA_LOCAL_SITE}${1}z
FILEPATH=$RFC_BIAS_OUTPUT_DIR/$FILENAME

#
# Check to make sure this file exists.
#
if [[ ! -a $FILEPATH ]]
then
   echo "$FILEPATH does not exist." >> $transmit_log
   exit 1
fi

SUBJECT="$FILENAME $PRODUCT_ID RADAR_PRECIP_BIAS"

#
# Call distributeProduct for the recipients.
#
echo "Sending file:$FILENAME product_ID:$PRODUCT_ID to '$RECIPIENTS' via "\
     "distributeProduct" >> $transmit_log
echo "distributeProduct -c RADAR_PRECIP_BIAS -s\"$SUBJECT\" "\
     "-a\"$RECIPIENTS\" -e $FILEPATH $PRODUCT_ID dummy" >> $transmit_log
/awips/fxa/bin/distributeProduct -c RADAR_PRECIP_BIAS -s "$SUBJECT" -a \
    "$RECIPIENTS" -e $FILEPATH $PRODUCT_ID \
    dummy >> $transmit_log 2>&1

#
# Test the exit status of distributeProduct.
#
if [[ $? -ne 0 ]]
then
   echo "The call to distributeProduct failed." >> $transmit_log
   exit 1
else
   echo "The call to distributeProduct was successful." >> $transmit_log
fi

#
# Remove the RFC bias message.
#
rm -f $FILEPATH >> $transmit_log 2>&1

# End of script.
#
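As a concrete configuration sketch, the MARFC example in the header comment translates into these two site-specific lines at the top of the script (values illustrative, taken from the comment):

   export PRODUCT_ID="KRHARRMRHA"
   export RECIPIENTS="LWX,CTP,PHI,PBZ,BGM,BUF,OKX"

after which a cron or mpe_editor invocation takes a single YYYYMMDDHH argument, e.g.:

   transmit_rfc_bias 2007032612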
@@ -1,186 +0,0 @@
#!/bin/ksh
###############################################################################
# transmit_rfc_qpe
#
# This script transmits the 1 hour MPE generated Best Estimate QPE mosaics
# over the SBN. Normally, these QPE products will be created at an RFC
# and sent over the SBN for WFOs to use in their daily operations. These
# products are displayable through MPE Editor and Hydroview, and they
# can be used operationally in applications like the Site Specific Head Water
# Model.
#
# This script is called by MPE Fieldgen when run from the cron, and by
# MPE Editor from the Transmit Best Estimate QPE option on the MPE Control
# menu.
#
# The mpe_send_qpe_to_sbn token must be set to "ON" and the mpe_save_grib token
# must be set to "save" for this script to function. Also, the section
# of this script which calls distributeProduct must be uncommented.
#
# Example of PRODUCT ID: KALRQPEBIN
# Where NNN should be QPE
# Where XXX should be BIN
# Where CCCC should be:
#
# PACR : APRFC
# KALR : SERFC
# KTAR : NERFC
# KTIR : OHRFC
# KFWR : WGRFC
# KKRF : MBRFC
# KMSR : NCRFC
# KORN : LMRFC
# KTUA : ABRFC
# KPTR : NWRFC
# KRHA : MARFC
# KRSA : CNRFC
# KSTR : CBRFC
#
# Modification History
# June 1, 2006      Bryon Lawrence   Original Coding
# November 8, 2006  Bryon Lawrence   Updated to read GRIB
#                                    files from qpe_grib_sbn
#                                    directory. Removed
#                                    logic using touch file.
###############################################################################

#
# The product id must be locally defined for the office.
#
export PRODUCT_ID=CCCCQPEBIN

export FXA_HOME=/awips/fxa

#
# Set up the D2D environment...
. $FXA_HOME/readenv.sh

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../../set_hydro_env
export GAQ_LOG_DIR=$(get_apps_defaults gaq_log_dir)
export MPE_SEND_QPE_TO_SBN=$(get_apps_defaults mpe_send_qpe_to_sbn)
export MPE_SAVE_GRIB=$(get_apps_defaults mpe_save_grib)
export MPE_QPE_GRIB_SBN_DIR=$(get_apps_defaults mpe_qpe_grib_sbn_dir)

#
# Open the transmit_rfc_qpe log file.
#
transmit_qpe_log=`date +$GAQ_LOG_DIR/transmit_rfc_qpe_%m%d`

#
# Save only the latest information in the log file.
echo "-------------------------------------------" >> $transmit_qpe_log

Dte=`date -u`
echo "Starting transmit_rfc_qpe at $Dte" >> $transmit_qpe_log

#
# Check to make sure that the mpe_save_grib token is set to 'save'.
#
if [[ $MPE_SAVE_GRIB != "save" && $MPE_SAVE_GRIB != "SAVE" ]]
then
   echo "The mpe_save_grib token is not set to save." >> $transmit_qpe_log
   echo "No QPE files transmitted." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 1
fi

#
# Check the mpe_send_qpe_to_sbn token to make sure that the office
# wants to send the 1 HR QPE over the SBN.
#
if [[ $MPE_SEND_QPE_TO_SBN != "ON" && $MPE_SEND_QPE_TO_SBN != "on" ]]
then
   echo "The mpe_send_qpe_to_sbn token is not set to 'ON'." >> \
        $transmit_qpe_log
   echo "No QPE files transmitted." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 1
fi

#
# Determine the list of QPE grib files to transmit over the SBN.
# Any files in the qpe_grib_sbn directory need to be sent over the SBN.
# Files are removed from this directory after each successful call to
# distributeProduct.
#
echo "Searching directory $MPE_QPE_GRIB_SBN_DIR for grib " >> $transmit_qpe_log
echo "files to transmit." >> $transmit_qpe_log

grib_file_list=`ls $MPE_QPE_GRIB_SBN_DIR/*.grib` >> $transmit_qpe_log 2>&1

if [[ $? -ne 0 ]]
then
   echo "An error was encountered while searching for grib files in" >> \
        $transmit_qpe_log
   echo "the $MPE_QPE_GRIB_SBN_DIR directory." >> \
        $transmit_qpe_log
   echo "No QPE files transmitted." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 1
fi

#
# Check if there are any files to process.
#
if [[ -z "$grib_file_list" ]]
then
   echo "No grib files found to process." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 0
fi

#
# Loop over the grib files.
#
for i in $grib_file_list
do
   echo "Processing grib file $i." >> $transmit_qpe_log

   #
   # The GRIB files normally created by the process_grib_files
   # script have a subcenter code of 0. GRIB files with a subcenter
   # code representing the transmitting RFC need to be created.
   #

   #
   # Send the product using distributeProduct.
   # The GRIB file is sent to the DEFAULTNCF. Upon arrival at the NCF,
   # the GRIB file is placed on the SBN uplink. The GRIB file is sent
   # as an attachment to the call to distributeProduct. The dummy file
   # does not exist. It is just a dummy argument.
   #
   echo "Sending file: $i product ID $PRODUCT_ID to distributeProduct" >> \
        $transmit_qpe_log
   echo "/awips/fxa/bin/distributeProduct -a DEFAULTNCF -e $i $PRODUCT_ID dummy" >> \
        $transmit_qpe_log
   /awips/fxa/bin/distributeProduct -a DEFAULTNCF -e $i $PRODUCT_ID dummy >> \
        $transmit_qpe_log 2>&1
   rc=$?

   if [[ $rc -eq 0 ]]
   then
      echo "The call to distributeProduct was successful." >> $transmit_qpe_log
      echo "Removing grib file $i." >> $transmit_qpe_log
      rm -f $i >> $transmit_qpe_log 2>&1
   else
      echo "The call to distributeProduct failed with code $rc." >> $transmit_qpe_log
      echo "Grib file $i not removed." >> $transmit_qpe_log
   fi
done

#
# This script can be locally configured here.
#
Dte=`date -u`
echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log

exit 0
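The ls-in-backticks idiom above leans on the exit status of ls to detect an empty qpe_grib_sbn directory. A hedged alternative sketch that stays within the shell (assuming default, non-null-glob ksh behavior, where an unmatched pattern is left unexpanded):

   # build the file list without spawning ls; an unmatched glob fails the -f test
   grib_file_list=""
   for f in $MPE_QPE_GRIB_SBN_DIR/*.grib
   do
      [[ -f $f ]] && grib_file_list="$grib_file_list $f"
   done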
@@ -1,2 +0,0 @@
#!/bin/ksh
echo "Test of mpe_internal_script" > /tmp/internal_script_test
@@ -1,189 +0,0 @@
#!/bin/ksh
###############################################################################
# process_qpe_mosaic
#
# This script grib encodes the RFC QPE mosaics and sends them to awips for
# display in D2D.
#
# In order for the generate_areal_qpe (GAQ) program to run and produce a
# mosaicked RFC QPE product, the mpe_generate_areal_qpe token must be set
# to "ON".
#
# In order for the RFC QPE product to be sent to D2D, the mpe_d2d_display_grib
# token must be set to "ON".
#
# The RFC and DURATION list variables (below) must be locally defined at
# each office.
#
# Modification History
# June 6, 2006        Bryon Lawrence   Script Written
# September 12, 2006  Bryon Lawrence   Setup to source readenv.sh
# November 7, 2006    Bryon Lawrence   Modified to set the
#                                      grib_set_subcenter_0 token to 'on'.
#
###############################################################################

# These must be defined for the office GAQ is running at.
# The RFC_LIST will contain all of the RFCs which cover all or part
# of the office's MPE forecast area. The DURATION_LIST will contain
# the durations to create RFC QPE products for. The acceptable durations
# are 1, 6, and 24.
RFC_LIST=MARFC,OHRFC,LMRFC
DURATION_LIST=1,6,24

# Export the value of FXA_HOME here. This allows read_env.sh to be
# sourced before set_hydro_env. This prevents set_hydro_env environment
# and token settings from being overwritten by read_env.sh.
export FXA_HOME=/awips/fxa

# Set up the AWIPS environment.
. $FXA_HOME/readenv.sh

# Allow this script to be run from outside of ./precip_proc/bin
RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../../set_hydro_env
export grib_bin_dir=$(get_apps_defaults pproc_bin)
export grib_out_dir=$(get_apps_defaults mpe_grib_dir)
export D2D_INPUT_DIR=$(get_apps_defaults d2d_input_dir)
export MPE_D2D_DISPLAY_GRIB=$(get_apps_defaults mpe_d2d_display_grib)
export MPE_GENERATE_AREAL_QPE=$(get_apps_defaults mpe_generate_areal_qpe)
export GAQ_LOG_DIR=$(get_apps_defaults gaq_log_dir)
export GAQ_TEMP_XMRG_DIR=$(get_apps_defaults gaq_temp_xmrg_dir)
export PPROC_BIN=$(get_apps_defaults pproc_bin)

#
# Open the process qpe mosaic log.
#
process_qpe_log=`date +$GAQ_LOG_DIR/process_qpe_mosaic_%m%d`

#
# Save only the latest information in the log file.
echo "-----------------------------------------------" >> $process_qpe_log

Dte=`date -u`
echo "Starting process_qpe_mosaic at $Dte" >> $process_qpe_log

echo "Token mpe_generate_areal_qpe is defined as $MPE_GENERATE_AREAL_QPE" >> \
     $process_qpe_log
echo "Token mpe_d2d_display_grib is defined as $MPE_D2D_DISPLAY_GRIB" >> \
     $process_qpe_log

#
# If the mpe_generate_areal_qpe token is not on, then do not generate
# QPE Mosaics.
#
if [[ $MPE_GENERATE_AREAL_QPE != "ON" && $MPE_GENERATE_AREAL_QPE != "on" ]]
then
   echo "Token mpe_generate_areal_qpe must be set to 'ON'." >> $process_qpe_log
   echo "RFC QPE mosaics not created." >> $process_qpe_log
   exit 1
fi

#
# Call the StoreHydroGrids program to ungrib the QPE GRIB messages stored in
# the /data/fxa/Grid/SBN/HydroRaw directory and place the resulting netCDF
# files into the /data/fxa/GRID/SBN/netCDF/HRAP/QPE and
# /data/fxa/Grid/SBN/HydroBad directories.
#
echo "Invoking $FXA_BIN_DIR/StoreHydroGrids" >> $process_qpe_log
$FXA_BIN_DIR/StoreHydroGrids >> $process_qpe_log 2>&1

#
# Call the gen_areal_qpe program.
#
$PPROC_BIN/gen_areal_qpe.LX -r$RFC_LIST -h$DURATION_LIST >> \
   $process_qpe_log 2>&1

#
# Retrieve a list of the files in the temporary QPE xmrg directory.
#
echo "Searching for QPE Mosaic files in $GAQ_TEMP_XMRG_DIR" >> \
     $process_qpe_log

file_list=`ls $GAQ_TEMP_XMRG_DIR/RFC*` >> $process_qpe_log 2>&1

#
# Only send the RFC QPE mosaics to D2D if the mpe_d2d_display_grib
# token is set to 'on'.
#
if [[ $MPE_D2D_DISPLAY_GRIB = "ON" || $MPE_D2D_DISPLAY_GRIB = "on" ]]
then

   if [[ -z "$file_list" ]]
   then
      echo "No QPE mosaic files found in $GAQ_TEMP_XMRG_DIR" >> \
           $process_qpe_log
      Dte=`date -u`
      echo "Finished process_qpe_mosaic at $Dte" >> $process_qpe_log
      exit 1
   fi

   #
   # Set the directory to read the GRIB files from.
   # Also set the directory to write the QPE files to.
   #
   export grib_in_dir=$GAQ_TEMP_XMRG_DIR
   export grib_out_dir=$GAQ_TEMP_XMRG_DIR

   #
   # Force the subcenter code in the GRIB message to be 0.
   # This will allow the QPE products contained within the GRIB
   # messages to be displayed in the local D2D.
   #
   export grib_set_subcenter_0=on

   for i in $file_list
   do
      #
      # Build the input and output paths for GRIBIT.
      #
      export grib_in_file=`basename $i`
      export grib_out_file=$grib_in_file.grib

      echo "Grib in file: $grib_in_file" >> $process_qpe_log
      echo "Grib out file: $grib_out_file" >> $process_qpe_log

      Dte=`date -u`
      echo "Invoking $grib_bin_dir/gribit.LX at $Dte" >> $process_qpe_log
      $grib_bin_dir/gribit.LX >> $process_qpe_log 2>&1

      #
      # Move the grib file to the SBN/Raw directory.
      #
      new_string=`date -u +%d%H%M%S`
      new_file_name=${grib_out_file%.*}_$new_string.grib

      echo "Move and rename grib file $grib_out_dir/$grib_out_file" >> \
           $process_qpe_log
      echo "to $D2D_INPUT_DIR/$new_file_name" >> $process_qpe_log
      mv $grib_out_dir/$grib_out_file $D2D_INPUT_DIR/$new_file_name >> \
         $process_qpe_log 2>&1

      if [[ $? -ne 0 ]]
      then
         # The move failed. Remove the grib file.
         rm -f $grib_out_dir/$grib_out_file >> $process_qpe_log 2>&1
      fi

      #
      # Notify D2D about the grib file.
      #
      echo "Invoking $FXA_BIN_DIR/notif_mpe.csh" >> $process_qpe_log
      $FXA_BIN_DIR/notif_mpe.csh >> $process_qpe_log 2>&1

   done

fi

#
# Remove the files from the GAQ output XMRG directory.
#
rm -f $file_list >> $process_qpe_log 2>&1

Dte=`date -u`
echo "Finished process_qpe_mosaic at $Dte" >> $process_qpe_log

exit 0

#
# End of process_qpe_mosaic script
#
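Because gribit.LX takes all of its input through exported environment variables rather than command-line arguments, a one-off manual encoding can be sketched as follows (the xmrg file name and work directory are hypothetical examples; this assumes set_hydro_env has already been sourced so that $grib_bin_dir is defined):

   # hypothetical stand-alone GRIBIT run for a single xmrg file
   export grib_in_dir=/tmp/xmrg_work
   export grib_out_dir=/tmp/xmrg_work
   export grib_in_file=RFCMOSAIC01z
   export grib_out_file=RFCMOSAIC01z.grib
   export grib_set_subcenter_0=on
   $grib_bin_dir/gribit.LX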
@@ -1,186 +0,0 @@
#!/bin/ksh
###############################################################################
# transmit_rfc_qpe
#
# This script transmits the 1 hour MPE generated Best Estimate QPE mosaics
# over the SBN. Normally, these QPE products will be created at an RFC
# and sent over the SBN for WFOs to use in their daily operations. These
# products are displayable through MPE Editor and Hydroview, and they
# can be used operationally in applications like the Site Specific Head Water
# Model.
#
# This script is called by MPE Fieldgen when run from the cron, and by
# MPE Editor from the Transmit Best Estimate QPE option on the MPE Control
# menu.
#
# The mpe_send_qpe_to_sbn token must be set to "ON" and the mpe_save_grib token
# must be set to "save" for this script to function. Also, the section
# of this script which calls distributeProduct must be uncommented.
#
# Example of PRODUCT ID: KALRQPEBIN
# Where NNN should be QPE
# Where XXX should be BIN
# Where CCCC should be:
#
# PACR : APRFC
# KALR : SERFC
# KTAR : NERFC
# KTIR : OHRFC
# KFWR : WGRFC
# KKRF : MBRFC
# KMSR : NCRFC
# KORN : LMRFC
# KTUA : ABRFC
# KPTR : NWRFC
# KRHA : MARFC
# KRSA : CNRFC
# KSTR : CBRFC
#
# Modification History
# June 1, 2006      Bryon Lawrence   Original Coding
# November 8, 2006  Bryon Lawrence   Updated to read GRIB
#                                    files from qpe_grib_sbn
#                                    directory. Removed
#                                    logic using touch file.
###############################################################################

#
# The product id must be locally defined for the office.
#
export PRODUCT_ID=CCCCQPEBIN

export FXA_HOME=/awips/fxa

#
# Set up the D2D environment...
. $FXA_HOME/readenv.sh

RUN_FROM_DIR=`dirname $0`
. $RUN_FROM_DIR/../../../set_hydro_env
export GAQ_LOG_DIR=$(get_apps_defaults gaq_log_dir)
export MPE_SEND_QPE_TO_SBN=$(get_apps_defaults mpe_send_qpe_to_sbn)
export MPE_SAVE_GRIB=$(get_apps_defaults mpe_save_grib)
export MPE_QPE_GRIB_SBN_DIR=$(get_apps_defaults mpe_qpe_grib_sbn_dir)

#
# Open the transmit_rfc_qpe log file.
#
transmit_qpe_log=`date +$GAQ_LOG_DIR/transmit_rfc_qpe_%m%d`

#
# Save only the latest information in the log file.
echo "-------------------------------------------" >> $transmit_qpe_log

Dte=`date -u`
echo "Starting transmit_rfc_qpe at $Dte" >> $transmit_qpe_log

#
# Check to make sure that the mpe_save_grib token is set to 'save'.
#
if [[ $MPE_SAVE_GRIB != "save" && $MPE_SAVE_GRIB != "SAVE" ]]
then
   echo "The mpe_save_grib token is not set to save." >> $transmit_qpe_log
   echo "No QPE files transmitted." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 1
fi

#
# Check the mpe_send_qpe_to_sbn token to make sure that the office
# wants to send the 1 HR QPE over the SBN.
#
if [[ $MPE_SEND_QPE_TO_SBN != "ON" && $MPE_SEND_QPE_TO_SBN != "on" ]]
then
   echo "The mpe_send_qpe_to_sbn token is not set to 'ON'." >> \
        $transmit_qpe_log
   echo "No QPE files transmitted." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 1
fi

#
# Determine the list of QPE grib files to transmit over the SBN.
# Any files in the qpe_grib_sbn directory need to be sent over the SBN.
# Files are removed from this directory after each successful call to
# distributeProduct.
#
echo "Searching directory $MPE_QPE_GRIB_SBN_DIR for grib " >> $transmit_qpe_log
echo "files to transmit." >> $transmit_qpe_log

grib_file_list=`ls $MPE_QPE_GRIB_SBN_DIR/*.grib` >> $transmit_qpe_log 2>&1

if [[ $? -ne 0 ]]
then
   echo "An error was encountered while searching for grib files in" >> \
        $transmit_qpe_log
   echo "the $MPE_QPE_GRIB_SBN_DIR directory." >> \
        $transmit_qpe_log
   echo "No QPE files transmitted." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 1
fi

#
# Check if there are any files to process.
#
if [[ -z "$grib_file_list" ]]
then
   echo "No grib files found to process." >> $transmit_qpe_log
   Dte=`date -u`
   echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log
   exit 0
fi

#
# Loop over the grib files.
#
for i in $grib_file_list
do
   echo "Processing grib file $i." >> $transmit_qpe_log

   #
   # The GRIB files normally created by the process_grib_files
   # script have a subcenter code of 0. GRIB files with a subcenter
   # code representing the transmitting RFC need to be created.
   #

   #
   # Send the product using distributeProduct.
   # The GRIB file is sent to the DEFAULTNCF. Upon arrival at the NCF,
   # the GRIB file is placed on the SBN uplink. The GRIB file is sent
   # as an attachment to the call to distributeProduct. The dummy file
   # does not exist. It is just a dummy argument.
   #
   echo "Sending file: $i product ID $PRODUCT_ID to distributeProduct" >> \
        $transmit_qpe_log
   echo "/awips/fxa/bin/distributeProduct -a DEFAULTNCF -e $i $PRODUCT_ID dummy" >> \
        $transmit_qpe_log
   /awips/fxa/bin/distributeProduct -a DEFAULTNCF -e $i $PRODUCT_ID dummy >> \
        $transmit_qpe_log 2>&1
   rc=$?

   if [[ $rc -eq 0 ]]
   then
      echo "The call to distributeProduct was successful." >> $transmit_qpe_log
      echo "Removing grib file $i." >> $transmit_qpe_log
      rm -f $i >> $transmit_qpe_log 2>&1
   else
      echo "The call to distributeProduct failed with code $rc." >> $transmit_qpe_log
      echo "Grib file $i not removed." >> $transmit_qpe_log
   fi
done

#
# This script can be locally configured here.
#
Dte=`date -u`
echo "Finished transmit_rfc_qpe at $Dte" >> $transmit_qpe_log

exit 0
@@ -1,14 +0,0 @@
1.00     F4.2     RESET TIME (HRS)
1.00     F4.2     PROJECTION INTERVAL (HRS)
4.00     F5.2     nominal grid size (km)
1.00     F4.2     ERROR PROPORTION FACTOR
0.2500   F6.4     NOMINAL SCAN INTERVAL (HRS)
0.10     F4.2     MINIMUM THRESHOLD PRECIP RATE (MM)
0        I2       MINIMUM NUMBER OF SAMPLES
1.0000   F6.4     Max. allowable missing period of radar data (hr) for reset
200.0    f6.1     PDF min. area threshold (km^2)
0.6      f6.1     PDF min. rainrate threshold (dBR) (modified to rainrate)
n        logical  Account for growth/decay?
1        I1       Storm motion vectors used (1=local, 2=uniform hrly-avg)
0.8      f6.1     lambda (for rate smooth)
1.3      f6.1     kappa (for rate smooth)
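A hedged sketch of listing these values from the shell — the file name nowcast.params is a hypothetical placeholder, and the second column presumably holds Fortran-style format codes for the real reader, so treat this as illustration only:

   # print each parameter value with its format code and description (ksh)
   while read value fmt desc
   do
      print "$value ($fmt): $desc"
   done < nowcast.params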
@@ -1,8 +0,0 @@
QPF   QPF_SFC    180  61  3  4  1
QTF   Tavg6_SFC   30  11  2  3  1
QTE   Tavg6_SFC   30  11  2  0  1
PETF  PotET_SFC   30  57  3  0  2
PETE  PotET_SFC   30  57  3  0  2
QPE   qpe_grid   172  61  3  4  1
QTE   qte_grid   172  11  2  3  1
QZE   qze_grid   172   7  2  0  1
Some files were not shown because too many files have changed in this diff.