+ *
+ * @author jsanchez
+ * @version 1.0
+ */
+
+public class EmergencyType {
+
+ public static final String EMER = "EMER";
+
+ private static final EmergencyType TORNADO = new EmergencyType(
+ "TORNADO EMERGENCY", "TO.W");
+
+ private static final EmergencyType FLASH_FLOOD = new EmergencyType(
+ "FLASH FLOOD EMERGENCY", "FF.W");
+
+ private final String value;
+
+ private final String phensig;
+
+ private static final EmergencyType[] values = new EmergencyType[] {
+ TORNADO, FLASH_FLOOD };
+
+ private EmergencyType(String type, String phensig) {
+ this.value = type;
+ this.phensig = phensig;
+ }
+
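+ /**
+ * Looks up the EmergencyType with the given phensig.
+ *
+ * @param phensig the phenomenon/significance code to match (e.g. "TO.W")
+ * @return the matching EmergencyType, or null if no match is found
+ */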
+ public static EmergencyType valueOf(String phensig) {
+ EmergencyType type = null;
+ for (EmergencyType t : values) {
+ if (t.phensig.equals(phensig)) {
+ type = t;
+ break;
+ }
+ }
+ return type;
+ }
+
+ /**
+ * Checks whether the text product is an emergency product.
+ *
+ * @param rawmessage the raw text product to check; may be null
+ * @return true if the product contains an emergency phrase, false otherwise
+ */
+ public static boolean isEmergency(String rawmessage) {
+ if (rawmessage != null) {
+ for (EmergencyType type : values) {
+ if (rawmessage.contains(type.getValue())) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+}
diff --git a/edexOsgi/com.raytheon.uf.common.geospatial/src/com/raytheon/uf/common/geospatial/MapUtil.java b/edexOsgi/com.raytheon.uf.common.geospatial/src/com/raytheon/uf/common/geospatial/MapUtil.java
index f16cd33f5f..167a7bcfa5 100644
--- a/edexOsgi/com.raytheon.uf.common.geospatial/src/com/raytheon/uf/common/geospatial/MapUtil.java
+++ b/edexOsgi/com.raytheon.uf.common.geospatial/src/com/raytheon/uf/common/geospatial/MapUtil.java
@@ -94,6 +94,8 @@ import com.vividsolutions.jts.geom.Polygon;
* 06/19/2012 14988 D. Friedman Make oversampling more like AWIPS 1
* 09/18/2012 #1091 randerso corrected getBoundingEnvelope
* 11/06/2012 15406 ryu Added convertToNativeEnvelope()
+ * 08/27/2013 #2287 randerso Fixed rotation methods so it is not necessary
+ * to subtract 180 from the returned value
*
*
*
@@ -1062,12 +1064,13 @@ public class MapUtil {
* the right of UP is north (or 360) degrees.
*
* @param latLon
+ * @param spatialObject the spatial object defining the grid the rotation is computed in
* @return rotation angle
*/
public static double rotation(Coordinate latLon,
ISpatialObject spatialObject) {
- double newLatLonY = latLon.y + 0.05;
+ double newLatLonY = latLon.y - 0.05;
if (newLatLonY > 90) {
newLatLonY -= 180;
}
@@ -1098,11 +1101,12 @@ public class MapUtil {
* the right of UP is north (or 360) degrees.
*
* @param latLon
+ * @param geometry the grid geometry the rotation is computed in
* @return rotation angle
*/
public static double rotation(Coordinate latLon, GridGeometry2D geometry) {
- double newLatLonY = latLon.y + 0.05;
+ double newLatLonY = latLon.y - 0.05;
if (newLatLonY > 90) {
newLatLonY -= 180;
}
diff --git a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java
index 49fc2d0149..a99224937c 100644
--- a/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java
+++ b/edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java
@@ -76,7 +76,7 @@ import com.vividsolutions.jts.io.WKTWriter;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 02/11/2009 1981 dhladky Initial Creation.
- *
+ * 09/03/2013 DR 13083 gzhang Added getZRvalue2() to correct DHR bin value handling.
*
*
* @author dhladky
@@ -1927,4 +1927,34 @@ public class ScanUtils {
return returns;
}
+
+ /**
+ * DR 13083: the first parameter zValue takes a radar bin's raw value,
+ * because the old version handles the value 66 incorrectly in
+ * getDecodedDHRValue(int).
+ *
+ * Usage: to be called from FFMPProcessor.processRADAR(ArrayList):
+ *
+ * 1. comment out the fval line;
+ * 2. call ScanUtils.getZRvalue2();
+ * 3. pass dataVals[j] as the first parameter in step 2 above.
+ *
+ * @param zValue raw radar bin value
+ * @param coefficent multiplicative coefficient of the Z-R relationship
+ * @param hailCap maximum allowed rate (the hail cap), applied before unit conversion
+ * @param power exponent of the Z-R relationship
+ * @param bias bias multiplier applied to the computed rate
+ * @return the precipitation value converted to inches
+ */
+ public static float getZRvalue2(double zValue, double coefficent,
+ double hailCap, double power, double bias) {
+ // The Fulton et al 1998 standard NWS Z-R relationship
+ double rValue = 0.0;
+ if (zValue >= 2) {
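+ // convert the raw DHR bin value to reflectivity in dBZ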
+ zValue = MIN_DHR_DBZ + ((zValue - 2) * DHR_DBZ_STEP);
+ double rlogMult = Math.log10(coefficent);
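+ // invert Z = a * R^b in dB form: R = 10^((dBZ - 10*log10(a)) / (10*b)), then apply the bias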
+ rValue = bias * Math.pow(10.0, (zValue - 10.0 * rlogMult) / (10.0 * power));
+
+ // hail cap check
+ if (rValue > hailCap) {
+ return (float) (MM_TO_INCH * hailCap);
+ }
+ } else {
+ return (float) rValue;
+ }
+
+ return (float) (MM_TO_INCH * rValue);
+ }
}
diff --git a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults
index d3908a8b55..9904db1bca 100644
--- a/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults
+++ b/edexOsgi/com.raytheon.uf.common.ohd/utility/common_static/base/hydro/Apps_defaults
@@ -1,1871 +1,1876 @@
-#
-# Official National .Apps_defaults file for AWIPS Release OB8.3
-# Also see .Apps_defaults_site for override settings
-# Revision History:
-# 11/06/2001 - adjusted many directory locations of precip_proc tokens.
-# notable changes: st3_mkimage, rfcwide_input_dir
-# added pproc_local, pproc_local_data, pproc_log
-# grouped tokens together for 3 subsystems - shefdecode, whfs,
-# precip_proc.
-# placed precip_proc section after ofs since there are some
-# dependencies
-# changed value of whfs_editor
-# added hydro_publicbin token
-# added pproc_util_log_dir
-# 07/01/2002 - added ens_input, ens_output, ens_files
-# 07/22/2002 - add global gaff execution token
-# 11/04/2002 - added disagg tokens
-# 08/29/2003 - added sqlcmd_bin_dir
-# 08/20/2003 - added ligtning_input_dir, lightning_log_dir
-# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed,
-# mpe_msc_precip_limit
-# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold
-# - changed mpe_gage_qc token value to ON
-# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz
-# 2/4/2004 - added mpe_locbias_1hr_rerun token
-# 02/11/2004 - Added hv_map_projection.
-# 02/19/2004 - Removed stage2 and stage3 related tokens.
-# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens.
-# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir,
-# mpe_lsatpre_calc.
-# 03/19/2004 - Added mpe_del_gage_zeros.
-# 03/22/2004 - added sshp tokens
-# 03/24/2004 - Added rpf_min_dur_filled
-# 03/31/2004 - Added SSHP tokens
-# 04/26/2004 - added sshp_invoke_map_preprocess and
-# sshp_java_process_host tokens for the
-# mpe_fieldgen scripts
-# 05/06/2004 - Added more RFC archive database (adb) tokens
-# 06/28/2004 - Added preadj_outts_dir
-# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log
-# and gage_pp_sleep.
-# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data,
-# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip
-# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode
-# timeseries_showcat, timeseries_linewidth, dam_icon_color
-# 10/14/2004 - Added the mpe_generate_list token. BAL
-# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc
-# 11/05/2004 - Corrected spelling of timeseries_endime. RAE
-# 11/23/2004 - Added the mpe_show_missing_gage token.
-# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based
-# precipitation totals are derived.
-# 01/10/2005 - Added the sum_pc_reports token.
-# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the
-# development tree location of .Apps_defaults, a copy of it
-# will be placed in /awips/hydroapps with the lines modified
-# in the AWIPS modification block to work in the /awips/hydroapps
-# tree.
-# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and
-# adb_shef_pro_logs_dir.
-# Added the pghost, and pguser, pgport tokens for PostGres.
-# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx.
-# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window
-# token.
-# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length,
-# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward.
-# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens.
-# 5/11/2005 - Changed pguser token value to pguser.
-# 6/9/2005 - Changed value of grib_rls (location of gribit executable)
-# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib
-# 6/15/2005 - Changed value for d2d_input_dir token
-# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir
-# token. This directory will contain the precip edit polygons
-# drawn in Hydroview/MPE and applied in MPE Fieldgen.
-# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens.
-# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database
-# used by the historical data browser.
-#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to
-# be under local/data/app/mpe instead of local/data/mpe.
-#10/6/2005 - Added the mpe_base_radar_mosaic token.
-#02/7/2006 - Added the mpe_split_screen token.
-#02/8/2006 - Added tokens for the PDC Preprocessor
-#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order
-# tokens.
-#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens.
-#04/19/2006 - Added new tokens for controling the orientation/appearance
-# of the historical data browser and the locations of the help
-# and configuration directory.
-#05/30/2006 - Modified the token values for datview_plot_font and anav_data.
-# Added the following tokens for archive database programs:
-# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir,
-# adb_shef_raw_add_adjust, rax_pghost, adb_name
-#05/30/2006 - Added the mpe_send_qpe_to_sbn token.
-#06/06/2006 - Added the grib_set_subcenter_0 token.
-#07/07/2006 - Added the ifp_griddb_dir token.
-#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens.
-#10/02/2006 - Added the sshp_map_qpe_to_use token.
-#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token.
-#11/17/2006 - Added the mpe_qpe_sbn_dir token.
-#05/08/2007 - Added tokens for the rfc bias transfer project.
-#05/09/2007 - Added 3 tokens for SRG field directories
-#05/14/2007 - Added token for rdhm input directory
-#O5/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to
-# sshp_background_forecast_length
-#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs,
-# show_vtecqc_window, event_expire_withinhr
-#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token.
-# Biasmesgen reads this token to determine whether or not
-# to send the locally generated MPE bias to the RPG if
-# the RFC bias is not available.
-#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime
-#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview
-# and MPE Editor.
-#10/24/2007 - Added dhm_rain_plus_melt_data_dir token
-#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir,
-# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit
-#1/16/2008 - added new tokens for disagg processing
-# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0
-#3/22/2008 - Added variable substitution for database port.
-#
-#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the
-# product name. It was mrmosaic. It is now mmosaic.
-#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens.
-# These tokens define the time window for the SSHP HPN Prerocessor.
-#07/07/08 - Added sshp_show_unadjusted_states // for sshp
-#
-#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application
-#10/03/12 - Added token section for script execution
-
-
-# ==============================================================================
-# To see syntax rules for this file, see the bottom of this file
-#
-# Also see .Apps_defaults_site for overriding settings
-#
-
-#$=============================================================================
-#$ This section contains the tokens whose values are different between the
-#$ development and the delivery tree. The value give is the development
-#$ value. The commented value is the delivery value. The uncommented value
-#$ is in the development tree. All of these tokens must be enclosed
-#$ by the AWIPS_MODIFICATION_BLOCK_BEGIN and AWIPS_MODIFICATION_BLOCK_END
-#$ tags. Token names and commented lines should at column 1.
-
-#AWIPS_MODIFICATION_BLOCK_BEGIN
-
-apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory
-
-data_archive_root : /data_store # root directory of the data archive
-
-mcp3_icp_iface : $(HOME)/mcp3_ntrfc
-#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc
-
-verify_dir : $(apps_dir)/rfc/verify #base verify directory
-#verify_dir : /rfc_arc/verify #base verify directory
-
-vsys_dir : $(apps_dir)/rfc/verify #base verify directory
-#vsys_dir : $(verify_dir) #base verify directory
-
-#AWIPS_MODIFICATION_BLOCK_END
-
-#===================== Apps/Script Execution Tokens =================================
-WhfsSrv : ON
-WhfsSrv.purge_files : ON
-WhfsSrv.run_db_purge : ON
-WhfsSrv.run_floodseq : ON
-PprocSrv : ON
-PprocSrv.purge_mpe_files : ON
-PprocSrv.purge_hpe_file : ON
-MpeFieldGenSrv.run_mpe_fieldgen : ON
-WhfsSrv.run_pdc_pp : ON
-WhfsSrv.run_alarm_whfs : ON
-WhfsSrv.run_alarm_whfs.run_roc_checker : ON
-WhfsSrv.run_alarm_whfs.run_report_alarm : ON
-WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON
-ArealQpeGenSrv : ON
-DqcPreProcSrv : ON
-DqcPreProcSrv.run_dqc_preprocessor : ON
-MpeRUCFreezingLevel : ON
-MpeLightningSrv : ON
-#====================================================================================
-
-# ==============================================================================
-
-# Executable directory tokens.
-sys_java_dir : /awips2/java # Location of Java COTS software
-hydro_publicbin : $(apps_dir)/public/bin
-sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and
- # Linux beginning in OB3
-
-#################################################################################
-# Default Display Maps - comma separated list of maps with no spaces
-# Map names can be found in the localization perspective under
-# CAVE->Bundles->Maps. Use the filename without the extension.
-# statesCounties.xml -> statesCounties
-#
-# display_maps - default display maps for Hydro Perspective
-# mpe_display_maps - default display maps for MPE Perspective
-display_maps : statesCounties
-mpe_display_maps : statesCounties
-#################################################################################
-
-# database selection tokens
-server_name : ONLINE # Informix database server name
-db_name : hd_ob92lwx # IHFS database name
-damcat_db_name : dc_ob5xxx # Dam Catalog database name
-hdb_db_name : ob81_histdata # Historical database.
-pghost : localhost # The machine PostGres is running on
-pguser : awips # The user allowed to access PostGres
-pgport : 5432 # The PostGres Server port
-adb_name : adb_ob7xxx # RFC archive database name
-rax_pghost : ax # The machine PostGres is running on for the adb
-
-# vacuum log dir token.
-vacuum_log_dir : $(whfs_log_dir)/vacuum
-
-# WHFS specific tokens
-whfs_tz : EST5EDT # WHFS time zone for local time
-whfs_primary_radar : TLX # WHFS primary radar id, for Stage II
-
-# damcat tokens
-damcat_hostoffice_type : wfo # source of run-from office
-damcat_office_datasource : ohd # which data source is used
-max_storage_value : 0.00 # max storage volume filter
-damcat_data : /tmp/damcatData
-
-# Damcrest tokens
-damcrest.db_enabled : true # set to true when the user has damcat database
-damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially
-
-# Path to the editor used by Damcrest
-damcrest.editor : /usr/bin/gvim
-
-# Path to the damcrest data directory where input and output files
-# of the model are stored
-damcrest_data_dir : $(whfs_local_data_dir)/damcrest
-
-# Path to the directory where .vimrc resource file resides.
-# This resource file is needed when editor in Damcrest application
-# is set to gvim.
-damcrest_res_dir : $(whfs_config_dir)/damcrest
-
-#===================== SHEFDECODE Application Tokens ================================
-
-shefdecode_userid : oper # controlling UNIX user
-shefdecode_host : dx1f # controlling UNIX system.
-shefdecode_dir : $(apps_dir)/shefdecode # main directory location
-shefdecode_bin : $(shefdecode_dir)/bin # executable programs location
-shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location
-shef_data_dir : /data/fxa/ispan/hydro # input products location
-
-shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location
-shef_error_dir : $(shefdecode_dir)/logs/product # product log files location
-shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or
- # only when errors occur (=IF_ERROR)
-shef_perflog : ON # ON/OFF - create a separate performance log file to
- # save internal decoder timing messages for
- # monitoring performance
-shef_data_log : ON # ON/OFF - include messages in the log file detailing
- the SHEF records
-dupmess : ON # ON/OFF - include messages in the log file about
- # duplicate data
-elgmess : ON # ON/OFF - include messages in the log file about
- # data types not found in IngestFilter or
- # data types turned off in IngestFilter
-locmess : ON # ON/OFF - include messages in the log file about
- # stations and areas not found in Location
- # or GeoArea
-
-shef_sleep : 10 # sleep duration in seconds in between queries
-shef_winpast : 10 # number of days in past to post data
-shef_winfuture : 30 # number of minutes in future to post obs data
-shef_duplicate : IF_DIFFERENT # flag for handling duplicate date
- # ALWAYS_OVERWRITE-always overwrite when value repeats
- # USE_REVCODE-if revcode set overwrite duplicate value
- # IF_DIFFERENT-overwrite if new value is different
- # IF_DIFFERENT_OR_REVCODE-overwrite if new value is
- # different or revcode is set
-shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not
- # with (station id-PEDTSE) combinations as they
- # arrive in the input data flow
-shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages
- # to the TextProduct table
-shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables
- # IDS_ONLY - post only location identifiers for unknown
- # stations to the UnkStn table
- # IDS_AND_DATA - post all data from unknown stations to
- # the UnkStnValue table
-shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range
- # check to the physical element data tables (=PE) OR
- # to the RejectedData table (=REJECT)
-shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to
- # the observation data tables (=ON) or to
- # the ProcValue table (=OFF)
-shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table
- # VALID_ONLY - post data to the LatestObsValue table
- # ONLY if the gross range check is passed
-shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table
-shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast
- # height or discharge data being posted, load
- # the maximum forecast data into the RiverStatus table
-shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against
- # alert and alarm thresholds
-# -- Intermediate output from ShefParser prior to post
-shef_out : OFF
-
-
-#===================== WHFS Applications Tokens ================================
-
-whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree
-whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree
-whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree
-whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree
-whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree
-
-whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables
-
-whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds
-whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files
-whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS
-whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products
-whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports
-whfs_lines_per_page : 60
-
-whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files
-rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates
-metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config
-metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip "
-ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config
-hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc.
-hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc.
-rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir.
-
-whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids
-
-rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file.
-
-rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs
-rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs
-obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs
-whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs
-precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs
-floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs
-metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs
-hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs
-qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs
-
-db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token
-db_purge_backup_retention_use : ON # db_purge token for using backup retention value
-
-purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token
-
-whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables
-sws_parent_dir : $(whfs_bin_dir) # SWS parent dir
-sws_home_dir : $(whfs_bin_dir)/pa # SWS dir
-
-# -----------------------------------------------------------------
-# The Gage Precip Processor tokens
-# -----------------------------------------------------------------
-
-gage_pp_userid : oper # controlling UNIX user
-gage_pp_host : dx # controlling UNIX system
-gage_pp_data : $(pproc_local_data)/gpp_input # input data files location
-gage_pp_log : $(pproc_log)/gage_pp # daily log files location
-gage_pp_sleep : 10 # sleep duration in seconds in between queries
-gage_pp_enable : ON # gpp enabled; shef uses to determine post
-shef_post_precip : OFF # post to Precip/CurPrecip tables
-build_hourly_enable : ON # Enable the build_hourly application
-
-# ----------------------------------------------------------------
-# The following tokens are most likely to be customized by the user
-# (the first 4 MUST be customized at each site in the .Apps_defaults_site file)
-# ----------------------------------------------------------------
-hv_center_lat : 35.0 # HydroView center latitude
-hv_center_lon : -97.8 # HydroView center longitude
-hv_height_in_pixels : 900 # Hydroview map height in pixels
-hv_width_in_pixels : 1200 # Hydroview map width in pixels
-hv_map_width : 320 # HydroView map width (nautical miles)
-hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF)
-hv_hours_in_window : 4 # Change window hours
-hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out
-hv_disclosure_limit : 60 # Prog disclosure limit
-hv_zoom_threshold : 150 # nautical miles; Hydroview
- # detail level for cities/towns
-hv_map_projection : FLAT # Sets default map projection used in
- # hydroview/MPE. Options are FLAT, POLAR
- # or HRAP.
-hv_refresh_minutes : 15 # HydroView auto refresh time (minutes)
-hv_riverbasis : maxobsfcst # initial river basis for river characteristics
-hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered
- # by precip data.
-ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local to
- # to use PPP and PPD reports for 24 hour
- # precip summaries.
- # values either obs, fcst, maxobsfcst
-shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF
- # encoded messages from Hydro Time Series
-whfs_editor : whfs_editor # WHFS text editor
-rpf_linewidth : 80 # width of line in RiverPro generated products
-rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro
-office_prefix : K # fourth char prepended to 3-char office id
-vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field
-vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field
-pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours)
-whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations
-whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs
-whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP
-whfs_printcommand_LX : lp # command used to print WHFS apps reports
- # on LX
-whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports
-
-dam_icon_color : BROWN # Color used for dam icon in Hydroview
-timeseries_begintime : 5 # number of days back relative to current time
-timeseries_endtime : 3 # number of days ahead relative to current time
-timeseries_showcat : 2 # scale by data and show categories
-timeseries_linewidth : 1 # width of line drawn on graph
-timeseries_mode : STATION # set to GROUP or STATION mode
-timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box
- # Defaults to off if not set
-rpf_stage_window : 0.5 # set stage window for determining the trend
- # variables in RiverPro
-show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro
-rpf_endtime_shifthrs : 6 # in RiverPro
-event_expire_withinhr : 3 # in RiverPro
-
-#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP=====
-# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office)
-
-gaff_execution : ON # ON/OFF token for the gen_areal_ffg process
- # the gen_areal_ffg process is run from the
- # process_dpa_files script at WFOs
-gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked
- # list is comma separated, no embedded
- # spaces are allowed
-gaff_input_dir : $(EDEX_HOME)/data/processing
- # directory containing gridded FFG
- # generated by RFCs
-gaff_look_back_limit : 60 # number of hours to look back for valid gridded
- # FFG data for input
-gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output
- # mosaicked gridded FFG in
- # netCDF format
-gaff_durations : 1,3,6 # FFG durations in hours
- # list is comma separated, no embedded
- # spaces are allowed
-
-
-# ================= "ds_" system tokens (see more in site file) ===============
-
-ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs
-util_dir : $(apps_dir)/rfc/nwsrfs/util
-calb_dir : $(apps_dir)/rfc/nwsrfs/calb
-ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp
-icp_dir : $(apps_dir)/rfc/nwsrfs/icp
-ens_dir : $(apps_dir)/rfc/nwsrfs/ens
-fld_dir : $(apps_dir)/rfc/fld
-
-
-hdb_dir : $(apps_dir)/rfc/hdb
-
-# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = =
-
-ofs_rls : $(ofs_dir)/bin/RELEASE
-util_rls : $(util_dir)/bin/RELEASE
-calb_rls : $(calb_dir)/bin/RELEASE
-ffg_rls : $(ffg_dir)/bin/RELEASE
-ifp_rls : $(ifp_dir)/bin/RELEASE
-icp_rls : $(icp_dir)/bin/RELEASE
-ens_rls : $(ens_dir)/bin/RELEASE
-hdb_rls : $(hdb_dir)/bin/RELEASE
-fld_rls : $(fld_dir)/bin/RELEASE
-xsets_rls : $(xsets_dir)/bin/RELEASE
-xnav_rls : $(xnav_dir)/bin/RELEASE
-xdat_rls : $(xdat_dir)/bin/RELEASE
-
-ofs_arc : $(ofs_dir)/bin/ARCHIVE
-util_arc : $(util_dir)/bin/ARCHIVE
-calb_arc : $(calb_dir)/bin/ARCHIVE
-ffg_arc : $(ffg_dir)/bin/ARCHIVE
-ifp_arc : $(ifp_dir)/bin/ARCHIVE
-icp_arc : $(icp_dir)/bin/ARCHIVE
-ens_arc : $(ens_dir)/bin/ARCHIVE
-hdb_arc : $(hdb_dir)/bin/ARCHIVE
-fld_arc : $(fld_dir)/bin/ARCHIVE
-xsets_arc : $(xsets_dir)/bin/ARCHIVE
-xnav_arc : $(xnav_dir)/bin/ARCHIVE
-xdat_arc : $(xdat_dir)/bin/ARCHIVE
-# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = =
-
-# LDAD shefencode tokens
-ldad_data_dir : /awips/ldad/data # the LDAD internal data dir
-shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl
-shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl
-
-# NWSRFS tokens
-
-rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt.
-rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files
-rfs_doc : $(rfs_dir)/doc # NWSRFS documentation
-
-# OFS tokens
-locks_dir : $(rfs_dir)/locks
-ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock
-ofs_lock_wait_interval : 5 # no. of secs 'tween retries to get an ofs lock
-ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks.
-
-ofs_level : oper
-ofs_reor_lvl : oper_new
-ofs_inpt_grp : oper
-
-home_files_workstation : ds
-
-ofs_log_output : off # whether to output file r/w info
-ofs_error_output : on # whether to output file error info
-fortran_stderr : 7 # FORTRAN standard error unit
-
-ofs_bin : $(ofs_dir)/bin # OFS executables dir
-ofs_files : $(ofs_dir)/files # OFS file group
-ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir
-ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files
-ofs_output : $(ofs_dir)/output # OFS output dir
-ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
-ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
-ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir
-ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir
-ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir
-ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields
-ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir
-ofs_server : apwk01g2 # OFS "slave" server
-my_output : $(ofs_output)/$(LOGNAME) # users ofs output files
-
-ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd
-ndfd2rfs_output : $(my_output)
-ndfd2rfs_log_level : 0
-
-fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata
-
-# calb tokens
-calb_bin : $(calb_dir)/bin
-calb_lib : $(calb_dir)/lib
-
-calb_data_grp : oper
-calb_inpt_grp : oper
-calb_input : $(calb_dir)/input/$(calb_inpt_grp)
-calb_output : $(calb_dir)/output
-calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp)
-calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
-peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
-
-calb_gzio_read : off # whether or not to read gzipped DATACARD files
-calb_gzio_write : off # whether or not to write gzipped DATACARD files
-
-nwsrfs_calbfile_default : CARD # default calibration file type
-nwsrfs_platform : AIX # operating system
-
-# ICP tokens
-icp_bin : $(icp_dir)/bin
-icp_pw : hILLEL
-icp_scripts : $(icp_dir)/scripts
-
-mcp_decks : $(calb_input)/mcp3
-mcp_dir : $(calb_rls)
-
-# IFP tokens
-ifp_help_dir : $(ifp_dir)/help_files # IFP help files
-ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code
-ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code
-ifp_sys_dir : $(ifp_dir)/system # IFP system files
-ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files
-ifp_options_dir : $(ifp_dir)/options # IFP options files
-ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files
-ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files
-ifp_rfc : host # name of RFC to run
-ifp_num_columns : 3 # number of columns to display
-ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory
-ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim
-ifp_dhm_data_dir : /data/dhm/$(LOGNAME)
-ifp_griddb_dir : $(ifp_dhm_data_dir)/precip
-
-# Ensemble (ens) tokens
-
-espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp
-espadp_dir : $(ens_dir)
-preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts
-ens_input : $(ens_dir)/input/$(ofs_level)
-ens_output : $(ens_dir)/output
-ens_files : $(ens_dir)/files/$(ofs_level)
-ens_scripts : $(ens_dir)/scripts
-
-# ens_pre tokens
-##FXA_HOME : /px1data #taken out by kwz.2/11/04
-enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook
-ens_log_dir : $(ens_output)/$(ofs_level)
-ens_msglog_level : 5
-preadj_outts_dir : $(calb_area_ts_dir)/pre
-
-# FLDGRF tokens (added 6 April 2000)
-
-fldgrf_iface : $(HOME)/fldgrf
-
-# ofsde tokens
-
-ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir
- # (formerly ofsde_output_dir)
-ofsde_ndate : 7 # number of days to search for forecast temps
-ofsde_rrstime_check : OFF # flag to check obs times of RRS data
- # against window around 12Z (OFF/ON)
-
-# intervals for max/min temperatures (used by ofsde)
-# these represent number of hours around 12z
-
-intlrmn : 8
-inturmn : 2
-intlrzn : 2
-inturzn : 2
-intlrzx : 8
-inturzx : 2
-siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr
- # PP data from PC data
- # if running RFCWide, should be set to OFF
-
-# defaults for geographic data
-
-geo_data : $(apps_dir)/geo_data
-geo_util : $(geo_data)/util
-
-geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary
-geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii
-
-#===================== PRECIP_PROC Application Tokens ========================
-
-# precip_proc directory
-
-pproc_dir : $(apps_dir)/precip_proc # precip proc top
- # level dir
-pproc_bin : $(pproc_dir)/bin # dir with precip proc exes
-pproc_local : $(pproc_dir)/local # dir with local items, esp. data
-pproc_local_data : $(pproc_local)/data # dir with local data
-pproc_local_bin : $(pproc_local)/bin # dir with local bin
-pproc_log : $(pproc_local_data)/log # dir with local logs
-
-pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs
-
-# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*)
-
-dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs
-dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory
-dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory
-dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files
-dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives
-dpa_wind : 10
-
-
-dpa_filter_decode : ON # flag for non-top-of-hour
- # filtering of decoded products
- # ON - filter products for decode
- # OFF - do not filter (ie decode all products)
-
-dpa_decode_window : 10 # number of minutes around top
- # of hour for filtering products for
- # decoding
-
-dpa_archive : OFF # ON/OFF flag for archiving products
- # OFF - do not archive products
- # ON - archive products and filter based
- # on value of dpa_archive_window
-
-dpa_archive_window : 10 # number of minutes around top
- # of hour for filtering products for archiving
-
-dpa_dirname1 : $(data_archive_root)/radar # first part of directory name
- # containing DPA products for
- # associated or dial in radars
-dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name
- # containing DPA products for
- # associated or dial in radars
-dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids
-
-# siipp tokens
-
-intpc : 10 # interval (minutes) around top of hour for using PC data
-intlppp : 2
-intuppp : 2
-intppq : 2
-siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs
- # (formerly siipp_output_dir)
-
-# tokens for stageiii
-st3_help : $(pproc_local_data)/app/stage3/help # online help text
-
-st3_rfc : host
-awips_rfc_id : TUA # 3 char AWIPS RFC identifier
- # must be all upper case
-
-# tokens for stageiii output
-st3_mapx_id : xmrg # identifier for Stage 3 output
-st3_date_form : mdY # date format
- # current allowable = Ymd or mdY
- # similar to formatting codes for
- # strftime function
-
-st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX
- # ofs_griddb_dir defined outside of pproc
-st3_out_dir : $(pproc_local_data)/stage3
-post_output : $(st3_out_dir)/post_analysis
-
-# defaults for netCDF output
-
-st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok
- # underscores needed between words
-st3_netcdf_swlat : 33.603
-st3_netcdf_swlon : 106.456
-st3_netcdf_selat : 32.433
-st3_netcdf_selon : 92.322
-st3_netcdf_nelat : 38.027
-st3_netcdf_nelon : 90.678
-st3_netcdf_nwlat : 39.420
-st3_netcdf_nwlon : 106.652
-
-#defaults for auto stageiii
-st3_auto_graphic_scale : 2.4 # used by gif file generation
-
-#===================== disagg Tokens (old disagg process)========================
-
-disagg_msglog_level : 30 # message level
- # possible values are 1,10,20,30,...80
- # lower values signify less info in log
-
-disagg_dur : 24 # maximum duration of precip gage data to
- # be disaggregated
- # possible values = 2,3,...,24
-
-disagg_look_back : 0 # time (hours) to look back from current hour
- # for precip gage data to be disaggregated
-
-disagg_radius : 3 # number of HRAP bins within which the QPE
- # will be averaged for disagg
- # for example, if disagg_radius = 3, then
- # the 9 nearest neighbor QPE bin values
- # will be averaged
-disagg_set_date : 0 # identifier for current date (yyyymmdd).
- # Default value is 0 - set to
- # today date
-
-disagg_set_hour : 0 # identifier for current hour (hh).
- # Default value is 0
- # Possible values = 0,1,2,3,...,23
-
-disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs
-
-# =============== Multi-Sensor Precipitation Estimator (MPE) ================
-
-rfcw_rfcname : host
-rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen
-hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE
- # time lapse display
-
-### tokens for input ###
-
-rfcwide_input_dir : $(pproc_local_data)/app/mpe
-
-rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-
-# the help_dir token needs a trailing slash because it is required byt
-# the RFC software the processes the help info...
-
-rfcwide_help_dir : $(rfcwide_input_dir)/help/
-rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin
-rfcwide_prism_dir : $(rfcwide_input_dir)/prism
-rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations
-rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height
-rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles
-
-### tokens for output ###
-### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below
-
-rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc
-
-rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles
-rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip
-
-rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic
-rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic
-rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic
-rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic
-rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic
-rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic
-rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic
-rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre
-rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly
-
-rfcwide_height_dir : $(rfcwide_output_dir)/height
-rfcwide_index_dir : $(rfcwide_output_dir)/index
-rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias
-rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan
-rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic
-
-rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe
-rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var
-rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var
-mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var
-
-# ==================== MPE Tokens ===============================
-
-#daily qc options token defaults to 'off' where daily qc options are grayed out; values are 'on' and 'off'
-mpe_dqc_options : off
-mpe_map_background_color : GRAY20 # The default color of the MPE map background
-mpe_temperature_window : 60 # The window in minutes the dqc preprocessor
- # searches around a synoptic time
- # (00z,06z,12z,18z) for temperature data.
-mpe_maxminT_hour_window : 2
-mpe_dqc_max_precip_neighbors : 30
-mpe_dqc_max_temp_neighbors : 20
-mpe_dqc_precip_deviation : 3.0
-mpe_dqc_temperature_deviation : 10.0
-mpe_dqc_min_good_stations : 5
-mpe_copy_level2_dqc_to_ihfs_shef : OFF
-mpe_copy_level2_dqc_to_archive_shef : OFF
-mpe_dqc_num_days : 10
-mpe_dqc_warningpopup : on
-mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the
- # Edit Precip Stations window.
- # OFF – if user sets 6hr value to Bad; 24hr value unaffected
- # ON - if user sets 6hr value to Bad; 24hr value set to Bad
- # Added at request of MBRFC to help with QC of SNOTEL.
-
-mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a
- # station to use the station to estimate the value at the grid bin.
-
-mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF
-
-mpe_dqc_execute_internal_script : OFF # ON/OFF
-
-mpe_dqc_24hr_precip_grid_meth : USE_24HR # We use the token values of ACCUM_6HR and USE_24HR
-mpe_td_new_algorithm : OFF # flag set for new algorithm in calculating Time Distributed estimate, the default
- # is false
-mpe_dqc_gridtype : SCALAR
-mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC
-mpe_dqc_lonorigin : -105.
-
-#daily qc preprocessor tokens
-dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z
-
-### MPE base directory tokens.
-mpe_dir : $(pproc_local_data)/mpe
-mpe_gageqc_dir : $(mpe_dir)/dailyQC
-mpe_scratch_dir : $(mpe_gageqc_dir)/scratch
-mpe_app_dir : $(pproc_local_data)/app/mpe
-mpe_fieldgen_product_dir : $(mpe_dir)
-
-### MPE station list tokens
-mpe_station_list_dir : $(mpe_app_dir)/station_lists
-mpe_site_id : ounx
-mpe_area_names : $(mpe_site_id)
-
-### MPE static data files
-mpe_prism_dir : $(mpe_app_dir)/prism
-mpe_misbin_dir : $(mpe_app_dir)/misbin
-mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles
-mpe_beamheight_dir : $(mpe_app_dir)/beam_height
-mpe_climo_dir : $(mpe_app_dir)/climo
-mpe_help_dir : $(mpe_app_dir)/help
-mpe_gridmask_dir : $(mpe_app_dir)/grid_masks
-mpe_basin_file : $(whfs_geodata_dir)/basins.dat
-
-### MPE precipitation gage qc directories
-mpe_precip_data_dir : $(mpe_gageqc_dir)/precip
-mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad
-mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev
-mpe_map_dir : $(mpe_precip_data_dir)/MAP
-mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid
-mpe_point_precip_dir : $(mpe_precip_data_dir)/point
-
-### MPE temperature gage qc directories
-mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature
-mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad
-mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev
-mpe_mat_dir : $(mpe_temperature_data_dir)/MAT
-mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid
-mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point
-
-### MPE freezing level gage qc directories
-mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level
-mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ
-mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid
-mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point
-ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC
-
-### MPE 1 hour mosaics and fields and supporting reference fields.
-mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic
-mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic
-mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic
-mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files
-mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon
-mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly
-mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles
-mpe_height_dir : $(mpe_fieldgen_product_dir)/height
-mpe_index_dir : $(mpe_fieldgen_product_dir)/index
-mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic
-mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias
-mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan
-mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre
-mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic
-mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic
-mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic
-mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic
-mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic
-mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic
-mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe
-mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn
-mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif
-mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib
-mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn
-mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg
-mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf
-mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic
-mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var
-mpe_state_var : $(mpe_fieldgen_product_dir)/state_var
-mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic
-mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic
-mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic
-mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic
-mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic
-mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1
-mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2
-mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3
-
-### Tokens related to the MPE Editor map display.
-mpe_config_dir : $(whfs_config_dir)
-mpe_center_lat : 39.8
-mpe_center_lon : -98.55
-mpe_height_in_pixels : 900
-mpe_width_in_pixels : 1200
-mpe_map_width : 1320
-mpe_zoom_out_limit : 20
-mpe_disclosure_limit : 60
-mpe_map_projection : FLAT
-
-### Misc tokens
-mpe_load_hourlypc : ON
-mpe_gageqc_gif_dir : $(whfs_image_dir)
-mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0
-mpe_overlay_dir : $(whfs_geodata_dir)
-mpe_editor_logs_dir : $(pproc_log)/mpe_editor
-mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP
-
-### New tokens for DQC/CHPS
-mpe_level2_type_value : 2 # Allow user to customize the type value. The default is “2”
-mpe_td_details_set : OFF # Allow generating a time distribution details file.
-mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF"
-mpe_map_one_zone : OFF # Allow MAP generation for one zone only
-fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir
-nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file
-netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files
-mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF
-mpe_dqc_save_grib : OFF # Save Daily QC as grib
-
-### Tokens which control the products generated by MPE Fieldgen.
-mpe_locbias_1hr_rerun : OFF # ON/OF .Apps_defaultsF flag to
- # determine if local bias should be
- # recalculated as part of the mpe_fieldgen
- # rerun from hmap_mpe
- # ON -- recalc loc bias on rerun
- # OFF -- do not recalc loc bias on rerun
-mpe_del_gage_zeros : OFF # ON/OFF flog to determine if a zero gage
- # value should be removed from consideration
- # if the radar shows > 0.0
- # ON -- check for and remove zero gage values
- # OFF -- do not check for or remove zero
- # gage values
-
-mpe_selected_grid_gagediff : MMOSAIC
-
-mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe
-mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2
-mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields
- # that mpe_fieldgen generates
-mpe_show_missing_gage : None # MPE missing gage display.
- # (None,All,Reported)
-mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages
-
-### directory locations of various format MPE output grid files
-mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif
-mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg
-mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf
-mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib
-
-### which format MPE output grid files to save
-mpe_save_gif : nosave
-mpe_save_jpeg : nosave
-mpe_save_netcdf : nosave
-mpe_save_grib : save
-
-### prefixes for various format MPE output grid files, blank by default
-mpe_gif_id :
-mpe_jpeg_id :
-mpe_netcdf_id :
-mpe_grib_id :
-
-### mpe gage QC tokens
-mpe_gage_qc : ON
-mpe_sccqc_threshold : 2.0
-mpe_scc_boxes_failed : 4
-mpe_msc_precip_limit : 1.0
-mpe_split_screen : OFF
-
-### mpe polygon tokens
-mpe_polygon_action_order : None
-mpe_polygon_field_order : None
-
-### tokens which control the transmission of RFC bias data.
-mpe_transmit_bias : OFF
-transmit_bias_on_save : NO
-transmit_bias_on_rerun : NO
-rfc_bias_input_dir : $(mpe_dir)/bias_message_input
-rfc_bias_output_dir : $(mpe_dir)/bias_message_output
-process_bias_log_dir : $(pproc_log)/process_bias_message
-send_local_bias_when_rfc_bias_missing : NO
-
-### rfc qpe to wfo tokens
-mpe_send_qpe_to_sbn : OFF
-mpe_generate_areal_qpe : OFF
-# List of RFCs to process for Gen Areal Qpe
-gaq_rfc_list : MBRFC,NCRFC
-gaq_dur_list : 1,6,24
-gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe
-gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE
-gaq_log_dir : $(pproc_log)/gen_areal_qpe
-gaq_rfc_mask_dir : $(gaq_app_dir)
-gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp
-gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01
-gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06
-gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24
-gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib
-
-### token which controls how PC precipitation totals are derived.
-sum_pc_reports : NO
-
-geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc
-geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii
-adjust_PC_startingtime : 4 #allow PC starting time tolerance
-
-### tokens for sending MPE mean field bias data to the ORPG
-
-bias_message_dir : $(apps_dir)/data/fxa/radar/envData
-
-### tokens for Lightning Data processing
-
-lightning_input_dir : /data/fxa/point/binLightning/netcdf
-
-lightning_log_dir : $(pproc_log)/lightning_proc
-
-### tokens for D2D display
-
-mpe_d2d_display_grib : ON # ON/OFF token to determine if further
- # processing of grib file for D2D display
- # is required
-
-d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files
- # to be processed for D2D display
-
-mpe_send_grib : OFF # ON/OFF token to determine if grib file is
- # to be sent to other sites such as NPVU
-
-# disagg processing tokens
-
-mpe_disagg_execute : OFF
-mpe_disagg_method : POINT
-mpe_disagg_6hreq_0 : 1
-mpe_disagg_6hrgt_0 : 1
-
-#====== High-resolution Precipitation Estimator (HPE) tokens====================
-
-# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*)
-
-dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs
-
-dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory
-
-dhr_dirname1 : $(data_archive_root)/radar # first part of directory name
-# # containing DHR products for
-# # associated or dial in radars
-
-dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name
- # containing DHR products for
- # associated or dial in radar
-dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids
-
-dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files
-dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives
-
-# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*)
-
-dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs
-
-dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory
-
-dsp_dirname1 : $(data_archive_root)/radar # first part of directory name
-# # containing DSP products for
-# # associated or dial in radars
-
-dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name
- # containing DSP products for
- # associated or dial in radars
- # NOTE that DSP is level256 vs level16 for
- # STP and this is where it is stored
- # in AWIPS
-dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids
-dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files
-dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives
-
-
-hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC
-hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe
-
-hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
-hpe_input_dir : $(pproc_local_data)/app/hpe
-hpe_output_dir : $(pproc_local_data)/hpe
-hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var
-
-hpe_log_dir : $(pproc_local_data)/log/hpe
-
-hpe_hrap_grid_factor : 4 # 1 for HRAP grid
- # 4 for quarter HRAP grid
-
-hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic
-hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic
-hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic
-hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic
-hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic
-hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic
-hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre
-
-hpe_dspheight_dir : $(hpe_output_dir)/height
-hpe_dspindex_dir : $(hpe_output_dir)/index
-hpe_height_dir : $(hpe_output_dir)/height
-hpe_index_dir : $(hpe_output_dir)/index
-
-hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib
-dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf
-dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif
-hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib
-bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf
-bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif
-hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib
-ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf
-ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif
-hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib
-ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf
-ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif
-
-dhrmosaic_save_grib : save
-dhrmosaic_save_gif : nosave
-dhrmosaic_save_netcdf : nosave
-bdhrmosaic_save_grib : save
-bdhrmosaic_save_gif : nosave
-bdhrmosaic_save_netcdf : nosave
-ermosaic_save_grib : save
-ermosaic_save_gif : nosave
-ermosaic_save_netcdf : nosave
-ebmosaic_save_grib : save
-ebmosaic_save_gif : nosave
-ebmosaic_save_netcdf : nosave
-
-hpe_gif_dir : $(hpe_output_dir)/hpe_gif
-hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg
-hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf
-hpe_grib_dir : $(hpe_output_dir)/hpe_grib
-hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg
-hpe_save_gif : nosave
-hpe_save_jpeg : nosave
-hpe_save_netcdf : nosave
-hpe_save_grib : nosave
-
-dhr_window : 15
-dsp_window : 15
-dsp_duration : 60
-
-hpe_base_radar_mosaic : ERMOSAIC
-hpe_qpe_fieldtype : ERMOSAIC
-hpe_load_misbin : OFF
-hpe_debug_log : ON
-hpe_use_locbias : OFF
-hpe_runfreq : 5
-hpe_timelag : 5
-hpe_bias_source : RFC
-hpe_rfc_bias_lag : 2
-hpe_purge_logage : 720
-hpe_purge_fileage : 180
-hpe_purge_xmrgage : 75
-
-dhrmosaic_d2d_display_grib : ON
-ermosaic_d2d_display_grib : ON
-ebmosaic_d2d_display_grib : ON
-bdhrmosaic_d2d_display_grib : ON
-hpe_run_nowcast : ON
-hpe_nowcast_generate_list : PRTM, BPTRM
-hpe_nowcast_dir : $(hpe_output_dir)/nowcast
-hpe_rate_save_grib : save
-hpe_brate_save_grib : save
-hpe_tp1h_save_grib : save
-hpe_btp1h_save_grib : save
-hpe_4km_tp1h_save_grib : nosave
-hpe_4km_btp1h_save_grib : nosave
-nowcast_d2d_display_grib : ON
-hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
-hpn_use_meanvelocity : OFF
-hpn_meanvelocity_direction : 45 # direction precip is moving towards
-hpn_meanvelocity_speed : 20 # miles per hour
-
-
-hpe_send_grib : OFF # ON/OFF token to determine if grib file is
- # to be sent to other sites such as NPVU
-
-#========END HPE tokens======================================================
-
-# ================= Flash Flood Guidance System =============================
-
-ffg_level : oper
-
-ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg
-ffg_bin : $(ffg_dir)/bin # FFG execute dir
-ffg_files : $(ffg_dir)/files # FFG file group
-ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir
-ffg_out_dir : $(ffg_dir)/output # FFG output dir
-ffg_grib_out : $(ffg_out_dir)/grib # GRIB output
-ffg_scripts : $(ffg_dir)/scripts # FFG scripts
-ffg_gff_level : grff # regular grid ffg dir
-ffg_gro_level : grro # regular grid ro dir
- .Apps_defaults
-ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir
-ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir
-ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir
-ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir
-ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir
-ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir
-ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir
-
-ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters
-ffg_group_dir : $(ffg_gsfiles)/grpp # FFG groups of products
-ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir
-ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir
-ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir
-
-# ffg program control
-ffg_error_output : on # whether to output error messages
-ffg_log_output : off # whether to output log messages
-
-# ===================== GRIB packer/encoder =================================
-
-grib_dir : $(apps_dir)/rfc/grib # Top level grib
-grib_rls : $(pproc_bin) # location of gribit executable
-grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive
-grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded
-grib_out_dir : $(grib_dir)/output # GRIB encoded files
-grib_error_output : on # turn on/off GRIB error output
-grib_set_subcenter_0 : off # set subcenter to 0
- # on - set subcenter to 0
- # off - do not set subcenter to 0
-
-# end of ffg apps
-
-#================== XSETS Apps_defaults Tokens - 08/03/2001 ===================
-
-# [] = default value
-#.................................
-# Date Control
-#.................................
-xsets_date_used : SYSTEM # computer system clock
- # OFSFILES = forecast time series
- # mm/dd/ccyy = explicit date, 12Z
-
-#.................................
-# Directories and files to use
-#.................................
-xsets_dir : $(apps_dir)/rfc/xsets
-xsets_level : oper
-xsets_files : $(xsets_dir)/files
-xsets_xsfiles : $(xsets_files)/$(xsets_level)
-xsets_param_dir : $(xsets_xsfiles)/param
-xsets_config_file : xsetsconfig
-xsets_output_dir : $(xsets_xsfiles)/output
-
-#.................................
-# Commands
-#.................................
-xsets_editor : "nedit"
-xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot"
-xsets_print_cmd : "lp"
-xsets_xmit_cmd : "cat "
-
-#.................................
-# Parameters for creation of hydrographs
-#.................................
-xsets_hydro_button : NO # Create Make Hydro button, [NO]
- (currently unused)
-xsets_make_hydro : NO # Create .gif hydrographs, [NO]
-
-#.................................
-# NEW_HYDROPLOTS parameters
-#.................................
-xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on
- web server
-xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on
- web server
-xsets_hydrographs_html : 1 # 1 = create basic html
- 0 = no html created
-xsets_hydrographs_output: "$(xsets_output_dir)/gifs"
-xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param
-
-#.................................
-# File Print Options and Settings
-#.................................
-xsets_add_remarks : NO # Add remark after each site, [NO]
-xsets_brackets : NO # Put brackets around latest stage,
- # forecasts and dates, [NO]
-xsets_cmt_line : NO # YES = separate line,
- # NO = append to description, river
-xsets_expanded_dates : YES # Insert MMDD before values, [NO]
-xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string)
-xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages
-xsets_output_style : E # E = Expanded, each day has line,
- # C = Compact
-xsets_print_crests : YES # Print crest comment, [NO]
-xsets_print_disclaimer : YES # Print disclaimer, [NO]
-xsets_print_fs : YES # YES = encode flood stage in SHEF,
- # [NO] = display as comment
-xsets_print_fs_cross : COMMENT # Time level passes flood stage
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_ls : COMMENT # Latest stage
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_MAP : NO # Print MAP values, [NO]
-xsets_print_qpf : COMMENT # Print QPF values
- # [NO] = don't include,
- # SHEF = encode in SHEF,
- # COMMENT = display as comment
-xsets_print_ws : YES # Display warning/caution stage, [NO]
-xsets_product_hdr : PIT # Identifier in Product Header, non-AWIPS
-xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows
-xsets_signature : $(LOGNAME) #User signature (string)
-xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id
-xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string)
-xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS
-
-#.................................
-# Run Options
-#.................................
-xsets_age_check : 6 # Number of hours old of forecast before
- # error generated, [6]
-xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]???
-xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused
-xsets_msg_obs_warn : YES # Print warning when observed values are
- # missing, [NO]
-xsets_numhrs_curob : 12 # number of hours back from current time to use
- # informix obs as "current obs"
-xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product
-xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product
-xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG
-xsets_ofs_select : OFS # OFS or IFP for time series files
-xsets_stdout : NO # Send wprint messages to stdout, [NO]
-xsets_time : Z # Time Zone code used in product
- # ([Z], E, C, M, P, A, H OR N)
-# ================== end of xsets tokens =======================================
-
-#================== XNAV Apps_defaults Tokens - 03/29/2000 ====================
-# defaults for program XNAV
-
-xnav_user : oper
-
-#.................................
-# Date/time related tokens
-#.................................
-db_days : 10
-xnav_daily_days : 30
-xnav_ffg_periods : 3
-xnav_sixhr_periods : 40
-xnav_hyd_days_fut : 5
-xnav_hyd_days_prev : 5
-xnav_precip_hours : 240
-xnav_settoday :
-
-#.................................
-# Directories and files to use
-#.................................
-xnav_dir : $(apps_dir)/rfc/xnav
-xnav_data : $(xnav_dir)/data
-xnav_params : $(xnav_dir)/parameters
-xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-xnav_bin_dir : $(xnav_dir)/bin
-xnav_data_dir : $(xnav_data)
-xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user)
-xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary
-xnav_gif_dir : $(HOME)/gifs/xnav
-xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff
-xnav_localdata_dir : $(xnav_data)/localdata
-xnav_misc_dir : $(xnav_data)/misc_data
-xnav_qpfbin_dir : $(xnav_data)/wfoqpf
-xnav_rfcfmap_dir : $(xnav_data)/rfcqpf
-xnav_rules_dir : $(xnav_params)/rules
-xnav_shefdata_dir : $(xnav_data)/shefdata
-xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products
-xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
-nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap
-
-#.................................
-# Fonts and colors
-#.................................
-xnav_action_color : yellow
-xnav_flood_color : red
-xnav_ok_color : green
-xnav_ts1_color : yellow
-xnav_ts2_color : magenta
-xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-*
-xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
-xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*"
-
-idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*"
-idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*"
-
-#.................................
-# Window size controls
-#.................................
-xnav_hrap_x : 59
-xnav_hrap_xor : 311
-xnav_hrap_y : 83
-xnav_hrap_yor : 410
-xnav_hydro_height : 400
-xnav_hydro_width : 750
-xnav_scale : 8.0
-xnav_scale_colors : 3.0
-xnav_x_offset : 100
-xnav_y_offset : 100
-
-#.................................
-# Display options
-#.................................
-xnav_basins : yes
-xnav_counties : no
-xnav_cwas : no
-xnav_fgroups : no
-xnav_flights : no
-xnav_grid : no
-xnav_hydro_segments : no
-xnav_radars : no
-xnav_rfc : yes
-xnav_rivers : yes
-xnav_states : yes
-xnav_towns : yes
-
-#.................................
-# Other control options
-#.................................
-load_db_on_boot : no
-load_ofs_on_boot : no
-check_flood_on_boot : no
-use_new_xmrg : yes
-xnav_afosid : ? #PITRR1RHA
-xnav_editor : nedit
-xnav_exception_file : exception_file
-xnav_grid_ffg_pattern : xhr
-xnav_locrangecheck : no
-xnav_office_hdr : ? #KRHA
-xnav_only_use_ofs_data : no
-xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC"
-xnav_precip_filter : .01
-xnav_route_code : ? #ES
-xnav_seg_type : 2
-xnav_send_shef : no
-xnav_show_p1_files : yes
-xnav_suppress_msg : yes
-xnav_xmit_cmd : "cat "
-
-# ====== MAKE24HRXMRG Tokens ======
-
-make24hrxmrg_settoday : # Run date in mm/dd/yyyy. Empty means use the number
- # of days back argument to the program.
-make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output.
-make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not
- # given.
-make24hrxmrg_tz : Z # Time zone; E, C, M, P, Y, H, L, or Z (default).
-
-# ================== end of xnav tokens ========================================
-
-#================== XDAT Apps_defaults Tokens - 03/29/2000 ====================
-# defaults for program XDAT
-
-xdat_user : oper
-
-#................................
-# Date/time related tokens
-#................................
-xdat_flood_hours : 6
-xdat_settoday :
-
-#..................................
-# Directories and files to use
-#..................................
-xdat_dir : $(apps_dir)/rfc/xdat
-xdat_data : $(xdat_dir)/data
-xdat_params : $(xdat_dir)/parameters
-xdat_groups_dir : $(xdat_params)/groups
-xdat_localdata_dir : $(xdat_data)/localdata
-xdat_shefdata_dir : $(xdat_data)/shefdata
-
-#..................................
-# Fonts and colors to use
-#..................................
-xdat_label_font : ncenb14
-xdat_list_font : helvb14
-xdat_text_font : user14x19
-xdat_pb_font : ncenb14
-
-#.................................
-# Window size controls
-#.................................
-xdat_scale : 1.0
-
-#..................................
-# Display Options
-#..................................
-xdat_clear_id : yes
-
-#..................................
-# Other Control Options
-#..................................
-xdat_afosid : ?ofstest?
-xdat_office_hdr : ???
-xdat_post_unk : $(shef_post_unk)
-xdat_route_code : ???
-xdat_send_shef : no
-xdat_xmit_cmd : "cat "
-# ================== end of xdat tokens ========================================
-
-#====================== Shape Data File Directory ==============================
-shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape
- # files acting as data files
-
-
-#================== send_rfc Apps_defaults Tokens - 3/08/2001 =================
-send_rfc_dir : $(apps_dir)/rfc/send_rfc
-send_rfc_input_dir : $(send_rfc_dir)/data/send
-send_rfc_id : WWW
-send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF
-send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods
-send_rfc_hpc : 0
-send_rfc_host : ds-www
-send_rfc_alternate : 0
-# ================== end of send_rfc tokens ====================================
-
-#================== verify Apps_defaults Tokens - 08/03/2001 ==================
-# defaults for program verify
-vsys_output : $(vsys_dir)/output #location of output files
-vsys_input : $(vsys_dir)/input #location of input files
-vsys_files : $(vsys_dir)/files #location of verify files
-vsys_scripts : $(vsys_dir)/scripts #location of verify scripts
-vsys_output_log : test.log #name of log file
-vsys_ihfsdb : $(db_name) #ihfs_db name
-vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc"
-verify_rls : $(vsys_dir)/bin/RELEASE #The release directory.
-vsys_rls : $(verify_rls) #Not really needed, but consistent.
-
-# ================== end of verify tokens ======================================
-
-# ================== RFC Archive Database tokens ===============================
-
-archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data
-archive_enable : OFF # ON/OFF - Enable or Disable
- # archive data feed (OFF by default)
-metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory
- # used if archive_enable is ON
-
-#================== Directory tokens for RFC Archive Database ==================
-adb_dir : /rfc_arc # Base RFC Archive Directory
-adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory
-adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory
-adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory
-adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory
-adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory
-adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory
-adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory
-
-#================== Shefdecode tokens for RFC Archive Database =================
-
-adb_shef_winpast : 9999 # number of days in past to post data for RAW
-adb_shef_winfuture : 9999 # number of mins in future to post obs data
- # for RAW.
-adb_shef_winpast_pro : 9999 # number of days in past to post data
-adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data
-shefdecode_rax_userid : oper # controlling UNIX user
-adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file
- # location
-adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance
- # log file to save internal decoder timing
- # messages for monitoring performance
-adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the
- # daily logs directory
-adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the
- #product logs directory
-adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only
- # when errors occur (=IF_ERROR)
-adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables
- # values IDS_ONLY or IDS_AND_DATA
- # will post everything
- # to the UnkStnValue table
-adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables
- # values IDS_ONLY or IDS_AND_DATA
- # will post everything
- # to the UnkStnValue table
-adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance
- # log file to save internal decoder timing
- # messages for monitoring performance
-adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the
- # daily logs directory
-adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the
- # product logs directory
-adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only
- # when errors occur (=IF_ERROR)
-adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables
-adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables
-adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be
- # posted for raw decoder.
-adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder.
-adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder.
-adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder.
-adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw
- # decoder.
-adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table
- # ON will write to both pecrsep and peirsep tables
-adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder.
-adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder.
-adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro
- # decoder.
-adb_shef_pro_tmp_dir : $(adb_pro_que)
-adb_shef_raw_tmp_dir : $(adb_raw_que)
-adb_shef_raw_add_adjust : OFF
-
-#========== IHFS->RAX synchronization tokens for RFC Archive Database ==========
-adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files
-adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE
-adb_sync_tablenames : ALL # List of table names to synchronize
-adb_sync_ihfs_ingest: USE # USE or IGNORE
-adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH
-
-
-#================== DatView program tokens for RFC Archive Database ============
-datview_db_name : $(adb_name)
-datview_startdate : '1975-01-01 00:00:00'
-datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
-datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
-datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
-datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1
-datview_bg_color : black
-datview_fg_color : white
-datview_ob_color1 : green
-datview_ob_color2 : blue
-datview_ob_color3 : yellow
-datview_ob_color4 : red
-datview_ob_color5 : DarkOrange
-datview_ob_color6 : SlateGray1
-datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1
-datview_plot_width : 750
-datview_plot_height : 420
-datview_data_dir : /home/oper
-datview_raw_shef_dir : $(adb_raw_que)
-datview_pro_shef_dir : $(adb_pro_que)
-datview_office_header : KTUA # to be set by each RFC
-datview_pil : OKCRR1TUR # to be set by each RFC
-
-
-#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ==================
-# defaults for program ARCNAV
-
-anav_user : oper
-
-#.................................
-# Date/time related tokens
-#.................................
-anav_daily_days : 30
-anav_sixhr_periods : 40
-anav_precip_hours : 24
-
-
-#.................................
-# Directories and files to use
-#.................................
-
-anav_dir : /awips/hydroapps/lx/rfc/xnav
-anav_data : /data
-anav_flatfiles : $(anav_data)/flatfiles
-anav_params : $(anav_dir)/parameters
-anav_data_dir : $(anav_data)
-anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary
-anav_gif_dir : /rfc_arc/data/arcnav/gifs
-anav_localdata_dir : $(anav_data)/localdata
-anav_xmrg_dir : $(anav_flatfiles)
-
-#.................................
-# Fonts and colors
-#.................................
-anav_label_font : courb14gr
-anav_legend_font : courb14gr
-anav_list_font : courb14gr
-anav_menu_font : 9x15
-anav_pb_font : courb12gr
-anav_text_font : helvb18gr
-anav_toggle_font : courb14gr
-anav_town_font : courb12gr
-
-#.................................
-# Window size controls
-#.................................
-anav_hrap_x : 200
-anav_hrap_xor : 850
-anav_hrap_y : 200
-anav_hrap_yor : 470
-anav_hydro_height : 400
-anav_hydro_width : 750
-anav_scale : 3.5
-anav_scale_colors : 3.0
-anav_x_offset : 300
-anav_y_offset : 300
-
-#.................................
-# Display options
-#.................................
-anav_basins : yes
-anav_counties : no
-anav_cwas : no
-anav_fgroups : no
-anav_flights : no
-anav_grid : no
-anav_hydro_segments : no
-anav_radars : no
-anav_rfc : no
-anav_rivers : no
-anav_states : yes
-anav_towns : yes
-
-#.................................
-# Other control options
-#.................................
-anav_editor : nedit
-anav_suppress_msg : yes
-
-#......................................
-# tokens added for arcnav application
-# for future use
-#......................................
-anav_ok_color : green
-anav_action_color : yellow
-anav_flood_color : red
-anav_ts1_color : yellow
-anav_ts2_color : magenta
-
-# ================= end of arcnav tokens ======================================
-
-# ================== end of RFC Archive Database tokens ========================
-
-# ================== SSHP Directory Structure and application tokens ===============================
-
-local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer
-sshp_control_dir : $(whfs_local_data_dir)/app/sshp
-sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text
-sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml
-sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml
-sshp_incoming_dir : $(local_data_sshp_dir)/incoming
-sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing
-sshp_log_dir : $(whfs_log_dir)/sshp
-sshp_java_process_host : px1f
-sshp_invoke_map_preprocess: ON
-sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY
-sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts
-sshp_initial_forecast_length: 24 # length of forecast in hours
-sshp_max_forecast_length: 120 # max length of forecast in hours that user can make generated in GUI
-sshp_sac_update_expiration_hours: 25 # number of hours after which to update locally the SAC states
-sshp_sac_update_hours_forward: -2 # number of hours forward of last top of hour to save sac states -
- # negative -2 means 2 hours BEFORE last top of hour
-sshp_adjustment_pairing_minutes : 70
-sshp_adjustment_interpolation_hours : 3
-sshp_show_simulated_timeseries : true
-
-sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir
-sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files
-sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast
-sshp_background_forecast_length : 48 # length of a background forecast
-
-sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour
-sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour
-
-sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states
-# ==================== Radar Climatology Tokens ==============================
-radclim_data_dir : $(pproc_local_data)/app/radclim
-
-# ==================== PDC Preprocessor Tokens ===============================
-pdc_clean_cache_minutes : 60
-pdc_temperature_hours : 168
-pdc_height_hours : 168
-pdc_snow_hours : 168
-pdc_wind_hours : 168
-pdc_weather_hours : 168
-pdc_precip_hours : 168
-pdc_lower_window : 5
-pdc_upper_window : 5
-
-pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp
-pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp
-
-# ====================== Historical Data Browser Tokens =======================
-
-hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help
- # files
-hdb_script_directory : $(hdb_dir)/scripts # Historical data browser
- # scripts dir
-hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser
- # configuration file directory
-
-hdb_height_in_pixels : 900 # Historical data browser map height in
- # pixels
-hdb_width_in_pixels : 1200 # Historical data browser map width in
- # pixels
-hdb_center_lat : 35 # The initial center latitude of the HDB
-hdb_center_lon : -88.9 # The initial center longitude of the HDB
-hdb_map_width : 2999.862 # The width in nautical miles of the area
- # displayed in the HDB
-hdb_disclosure_limit : 60 # The disclosure limit for displaying finer
- # detail in the city overlay.
-hdb_map_projection : FLAT # The initial map projection used by HDB.
- # Possible values: FLAT, POLAR, HRAP
-# ====================== DHM Token =======================
-dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir
-dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir
-dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir
-rdhm_input_dir : $(geo_data)
-dhm_rain_plus_melt_data_dir: $(geo_data)
-# ================== end of SSHP Directory Structure tokens ========================
-
-
-# The syntax needed in the file is:
-#
-# token : resource
-#
-# where: token is defined as a string delimited by white space or
-# the delimiter,
-# the delimiter between token and resource is the :,
-# no white space needs to surround the delimiter,
-# comments are indicated by a #,
-# neither token nor resource can begin with a # or :,
-# a # or a : can be embedded within resource,
-# resource can contain white space if it is bounded by
-# the ' or " characters,
-# blank lines are allowed.
-# referbacks are indicated by $(...). The '...' is resolved
-# the same way any other token is, and is substituted for
-# the $(...) string to compose the final resource value.
-# Multiple referbacks are allowed in a resource, but
-# embedded referbacks are not allowed (i.e. no
-# $($(...)) allowed).
-# Note that this file is read only if the token can not be resolved
-# as an environment variable.
-#
-# ==============================================================================
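The syntax rules above (token : resource pairs, '#' comments, $(...) referbacks, and environment variables taking precedence) amount to a small lookup-and-expand routine. The Java sketch below only illustrates that resolution order; the class name is invented, the quoting and comment handling are simplified, and it is not the actual Apps_defaults reader used by the applications.

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/** Illustrative resolver for "token : resource" lines with $(...) referbacks. */
public class AppsDefaultsSketch {

    private static final Pattern REFERBACK = Pattern.compile("\\$\\(([^)]+)\\)");

    private final Map<String, String> tokens = new HashMap<>();

    /** Parse one line; '#' starts a comment, blank or malformed lines are ignored. */
    public void parseLine(String line) {
        int hash = line.indexOf('#');
        if (hash >= 0) {
            line = line.substring(0, hash); // simplified: ignores '#' embedded in quoted resources
        }
        int colon = line.indexOf(':');
        if (colon < 0) {
            return;
        }
        String token = line.substring(0, colon).trim();
        String resource = line.substring(colon + 1).trim();
        // Strip the optional quotes that allow white space in a resource (simplified).
        if (resource.length() >= 2
                && (resource.charAt(0) == '"' || resource.charAt(0) == '\'')) {
            resource = resource.substring(1, resource.length() - 1).trim();
        }
        if (!token.isEmpty()) {
            tokens.put(token, resource);
        }
    }

    /** Environment variables win; otherwise expand referbacks from the token table. */
    public String get(String token) {
        String env = System.getenv(token);
        if (env != null) {
            return env;
        }
        String raw = tokens.get(token);
        if (raw == null) {
            return null;
        }
        Matcher m = REFERBACK.matcher(raw);
        StringBuffer out = new StringBuffer();
        while (m.find()) {
            String nested = get(m.group(1)); // multiple referbacks allowed, embedded ones are not
            m.appendReplacement(out, Matcher.quoteReplacement(nested == null ? "" : nested));
        }
        m.appendTail(out);
        return out.toString();
    }

    public static void main(String[] args) {
        AppsDefaultsSketch d = new AppsDefaultsSketch();
        d.parseLine("apps_dir : /awips2/edex/data/share/hydroapps # hypothetical value");
        d.parseLine("pproc_dir : $(apps_dir)/precip_proc");
        System.out.println(d.get("pproc_dir")); // /awips2/edex/data/share/hydroapps/precip_proc
    }
}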
+#
+# Official National .Apps_defaults file for AWIPS Release OB8.3
+# Also see .Apps_defaults_site for override settings
+# Revision History:
+# 11/06/2001 - adjusted many directory locations of precip_proc tokens.
+# notable changes: st3_mkimage, rfcwide_input_dir
+# added pproc_local, pproc_local_data, pproc_log
+# grouped tokens together for 3 subsystems - shefdecode, whfs,
+# precip_proc.
+# placed precip_proc section after ofs since there are some
+# dependencies
+# changed value of whfs_editor
+# added hydro_publicbin token
+# added pproc_util_log_dir
+# 07/01/2002 - added ens_input, ens_output, ens_files
+# 07/22/2002 - add global gaff execution token
+# 11/04/2002 - added disagg tokens
+# 08/29/2003 - added sqlcmd_bin_dir
+# 08/20/2003 - added lightning_input_dir, lightning_log_dir
+# 10/03/2003 - added tokens gage_qc, sccqc_threshold, mpe_scc_boxes_failed,
+# mpe_msc_precip_limit
+# 10/10/2003 - changed token names to mpe_gage_qc, mpe_sccqc_threshold
+# - changed mpe_gage_qc token value to ON
+# 02/04/2004 - Added new tokens for ens_pre netCDF enhancement --kwz
+# 2/4/2004 - added mpe_locbias_1hr_rerun token
+# 02/11/2004 - Added hv_map_projection.
+# 02/19/2004 - Removed stage2 and stage3 related tokens.
+# 03/10/2004 - Added mpe_mlmosaic_calc and rfcwide_mlmosaic_dir tokens.
+# 03/16/2004 - Added rfcwide_lsatpre_dir, rfcwide_satstate_var_dir,
+# mpe_lsatpre_calc.
+# 03/19/2004 - Added mpe_del_gage_zeros.
+# 03/22/2004 - added sshp tokens
+# 03/24/2004 - Added rpf_min_dur_filled
+# 03/31/2004 - Added SSHP tokens
+# 04/26/2004 - added sshp_invoke_map_preprocess and
+# sshp_java_process_host tokens for the
+# mpe_fieldgen scripts
+# 05/06/2004 - Added more RFC archive database (adb) tokens
+# 06/28/2004 - Added preadj_outts_dir
+# 07/31/2004 - Added gage_pp_userid, gage_pp_host, gage_pp_data, gage_pp_log
+# and gage_pp_sleep.
+# 08/10/2004 - ssh- Added gage_pp_userid, gage_pp_host, gage_pp_data,
+# gage_pp_log, gage_pp_sleep, gage_pp_enable, shef_post_precip
+# 08/12/2004 - Added timeseries_begintime, timeseries_endtime, timeseries_mode
+# timeseries_showcat, timeseries_linewidth, dam_icon_color
+# 10/14/2004 - Added the mpe_generate_list token. BAL
+# 10/14/2004 - Removed the tokens: mpe_mlmosaic_calc, mpe_lsatpre_calc
+# 11/05/2004 - Corrected spelling of timeseries_endime. RAE
+# 11/23/2004 - Added the mpe_show_missing_gage token.
+# 01/07/2005 - Added the sum_pc_reports token. This controls how PC-based
+# precipitation totals are derived.
+# 01/10/2005 - Added the sum_pc_reports token.
+# 01/28/2005 - Added AWIPS MODIFICATION BLOCK. When gmake is run in the
+# development tree location of .Apps_defaults, a copy of it
+# will be placed in /awips/hydroapps with the lines modified
+# in the AWIPS modification block to work in the /awips/hydroapps
+# tree.
+# 01/28/2005 - Modified the definitions of adb_shef_pro_err_dir and
+# adb_shef_pro_logs_dir.
+# Added the pghost, and pguser, pgport tokens for PostGres.
+# 04/21/2005 - Changed shefdecode_host and gage_pp_host to dx.
+# 04/28/2005 - Added hv_min_dur_filled token. Added ppp_ppd_local_7am_window
+# token.
+# 5/5/2005 - Added SSHP tokens sshp_initial_forecast_length, sshp_max_forecast_length,
+# sshp_sac_update_expiration_hours, sshp_sac_update_hours_forward.
+# Moved sshp_fcst_ts to be next to the rest of the SSHP tokens.
+# 5/11/2005 - Changed pguser token value to pguser.
+# 6/9/2005 - Changed value of grib_rls (location of gribit executable)
+# - Added new tokens mpe_d2d_display_grib, d2d_input_dir, mpe_send_grib
+# 6/15/2005 - Changed value for d2d_input_dir token
+# 9/13/2005 - Replaced the edit_poly token with the rfcwide_drawpre_dir
+# token. This directory will contain the precip edit polygons
+# drawn in Hydroview/MPE and applied in MPE Fieldgen.
+# 9/22/2005 - Added the rfcwide_gageloc_dir and rfcwide_beamheight_dir tokens.
+# 9/27/2005 - Added the hdb_db_name token. Contains the name of the database
+# used by the historical data browser.
+#10/6/2005 - Modified the value of the rfcwide_utiltriangles_dir token to
+# be under local/data/app/mpe instead of local/data/mpe.
+#10/6/2005 - Added the mpe_base_radar_mosaic token.
+#02/7/2006 - Added the mpe_split_screen token.
+#02/8/2006 - Added tokens for the PDC Preprocessor
+#02/9/2006 - Added mpe_polygon_action_order and mpe_polygon_field_order
+# tokens.
+#03/2/2006 - Added new tokens for DailyQC. Added renamed MPE tokens.
+#04/19/2006 - Added new tokens for controlling the orientation/appearance
+# of the historical data browser and the locations of the help
+# and configuration directory.
+#05/30/2006 - Modified the token values for datview_plot_font and anav_data.
+# Added the following tokens for archive database programs:
+# adb_shef_pro_tmp_dir, adb_shef_raw_tmp_dir,
+# adb_shef_raw_add_adjust, rax_pghost, adb_name
+#05/30/2006 - Added the mpe_send_qpe_to_sbn token.
+#06/06/2006 - Added the grib_set_subcenter_0 token.
+#07/07/2006 - Added the ifp_griddb_dir token.
+#09/05/2006 - Added the dhm_d2d_data_dir and dhm_d2d_notify_dir tokens.
+#10/02/2006 - Added the sshp_map_qpe_to_use token.
+#11/02/2006 - Added the mpe_qpe_grib_sbn_dir token.
+#11/17/2006 - Added the mpe_qpe_sbn_dir token.
+#05/08/2007 - Added tokens for the rfc bias transfer project.
+#05/09/2007 - Added 3 tokens for SRG field directories
+#05/14/2007 - Added token for rdhm input directory
+#05/23/2007 - Added sshp_show_simulated_timeseries, changed sshp_background_fcst_length to
+# sshp_background_forecast_length
+#05/23/2007 - Add tokens for RiverPro: rpf_endtime_shifthrs,
+# show_vtecqc_window, event_expire_withinhr
+#06/18/2007 - Added the send_local_bias_when_rfc_bias_missing token.
+# Biasmesgen reads this token to determine whether or not
+# to send the locally generated MPE bias to the RPG if
+# the RFC bias is not available.
+#06/28/2007 - Added DailyQC preprocessor token dqc_preprocessor_basetime
+#07/17/2007 - Added rgb_file_path token. Used by new Color Manager in Hydroview
+# and MPE Editor.
+#10/24/2007 - Added dhm_rain_plus_melt_data_dir token
+#11/08/2007 - Added tokens for IHFS->RAX Synchronization: adb_sync_logs_dir,
+# adb_sync_mode, adb_sync_tablenames, adb_sync_ihfs_ingest, adb_sync_rivercrit
+#1/16/2008 - added new tokens for disagg processing
+# mpe_disagg_execute, mpe_disagg_method, mpe_disagg_6hreq_0,mpe_disagg_6hrgt_0
+#3/22/2008 - Added variable substitution for database port.
+#
+#3/5/2008 - Modified the value of the mpe_mmosaic_dir token. There was a typo in the
+# product name. It was mrmosaic. It is now mmosaic.
+#05/19/2008 - Added sshp_hpn_minutes_before and sshp_hpn_minutes_after tokens.
+# These tokens define the time window for the SSHP HPN Preprocessor.
+#07/07/08 - Added sshp_show_unadjusted_states // for sshp
+#
+#10/01/09 - Added 5 tokens for arcnav application. //only for arcnav for raxum application
+#10/03/12 - Added token section for script execution
+
+
+# ==============================================================================
+# To see syntax rules for this file, see the bottom of this file
+#
+# Also see .Apps_defaults_site for overriding settings
+#
+
+#$=============================================================================
+#$ This section contains the tokens whose values differ between the
+#$ development and the delivery trees. The value given is the development
+#$ value; the commented value is the delivery value. The uncommented value
+#$ is the one used in the development tree. All of these tokens must be
+#$ enclosed by the AWIPS_MODIFICATION_BLOCK_BEGIN and
+#$ AWIPS_MODIFICATION_BLOCK_END tags. Token names and commented lines
+#$ should start at column 1.
+
+#AWIPS_MODIFICATION_BLOCK_BEGIN
+
+apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory
+
+data_archive_root : /data_store # root directory of the data archive
+
+mcp3_icp_iface : $(HOME)/mcp3_ntrfc
+#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc
+
+verify_dir : $(apps_dir)/rfc/verify #base verify directory
+#verify_dir : /rfc_arc/verify #base verify directory
+
+vsys_dir : $(apps_dir)/rfc/verify #base verify directory
+#vsys_dir : $(verify_dir) #base verify directory
+
+#AWIPS_MODIFICATION_BLOCK_END
+
+#===================== Apps/Script Execution Tokens =================================
+WhfsSrv : ON
+WhfsSrv.purge_files : ON
+WhfsSrv.run_db_purge : ON
+WhfsSrv.run_floodseq : ON
+PprocSrv : ON
+PprocSrv.purge_mpe_files : ON
+PprocSrv.purge_hpe_file : ON
+MpeFieldGenSrv.run_mpe_fieldgen : ON
+WhfsSrv.run_pdc_pp : ON
+WhfsSrv.run_alarm_whfs : ON
+WhfsSrv.run_alarm_whfs.run_roc_checker : ON
+WhfsSrv.run_alarm_whfs.run_report_alarm : ON
+WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON
+ArealQpeGenSrv : ON
+DqcPreProcSrv : ON
+DqcPreProcSrv.run_dqc_preprocessor : ON
+MpeRUCFreezingLevel : ON
+MpeLightningSrv : ON
+#====================================================================================
+
+# ==============================================================================
+
+# Executable directory tokens.
+sys_java_dir : /awips2/java # Location of Java COTS software
+hydro_publicbin : $(apps_dir)/public/bin
+sqlcmd_bin_dir : /usr/local/sqlcmd/bin # location of sqlcmd executable on both HP and
+ # Linux beginning in OB3
+
+#################################################################################
+# Default Display Maps - comma separated list of maps with no spaces
+# Map names can be found in the localization perspective under
+# CAVE->Bundles->Maps. Use the filename without the extension.
+# statesCounties.xml -> statesCounties
+#
+# display_maps - default display maps for Hydro Perspective
+# mpe_display_maps - default display maps for MPE Perspective
+display_maps : statesCounties
+mpe_display_maps : statesCounties
+#################################################################################
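Because the display_maps and mpe_display_maps values are just the bundle file names with their extensions dropped, joined by commas with no spaces, the convention can be expressed as a one-method helper. The sketch below only restates that naming rule; the class name and the second bundle name in main() are hypothetical and are not part of CAVE.

import java.util.ArrayList;
import java.util.List;

/** Illustrative builder for display_maps / mpe_display_maps token values. */
public final class DisplayMapsTokenSketch {

    /** Turn bundle file names (e.g. statesCounties.xml) into a comma list with no spaces. */
    public static String toTokenValue(List<String> bundleFileNames) {
        List<String> names = new ArrayList<>();
        for (String file : bundleFileNames) {
            int dot = file.lastIndexOf('.');
            names.add(dot > 0 ? file.substring(0, dot) : file); // drop the extension
        }
        return String.join(",", names);
    }

    public static void main(String[] args) {
        // "otherMap.xml" is a made-up bundle name, used only to show the comma separation.
        System.out.println(toTokenValue(List.of("statesCounties.xml", "otherMap.xml")));
        // prints: statesCounties,otherMap
    }
}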
+
+# database selection tokens
+server_name : ONLINE # Informix database server name
+db_name : hd_ob92lwx # IHFS database name
+damcat_db_name : dc_ob5xxx # Dam Catalog database name
+hdb_db_name : ob81_histdata # Historical database.
+pghost : localhost # The machine PostGres is running on
+pguser : awips # The user allowed to access PostGres
+pgport : 5432 # The PostGres Server port
+adb_name : adb_ob7xxx # RFC archive database name
+rax_pghost : ax # The machine PostGres is running on for the adb
+
+# vacuum log dir token.
+vacuum_log_dir : $(whfs_log_dir)/vacuum
+
+# WHFS specific tokens
+whfs_tz : EST5EDT # WHFS time zone for local time
+whfs_primary_radar : TLX # WHFS primary radar id, for Stage II
+
+# damcat tokens
+damcat_hostoffice_type : wfo # source of run-from office
+damcat_office_datasource : ohd # which data source is used
+max_storage_value : 0.00 # max storage volume filter
+damcat_data : /tmp/damcatData
+
+# Damcrest tokens
+damcrest.db_enabled : true # set to true when the user has damcat database
+damcrest.hasListAllDams : true # when set to true, all dams will be displayed initially
+
+# Path to the editor used by Damcrest
+damcrest.editor : /usr/bin/gvim
+
+# Path to the damcrest data directory where input and output files
+# of the model are stored
+damcrest_data_dir : $(whfs_local_data_dir)/damcrest
+
+# Path to the directory where .vimrc resource file resides.
+# This resource file is needed when editor in Damcrest application
+# is set to gvim.
+damcrest_res_dir : $(whfs_config_dir)/damcrest
+
+#===================== SHEFDECODE Application Tokens ================================
+
+shefdecode_userid : oper # controlling UNIX user
+shefdecode_host : dx1f # controlling UNIX system.
+shefdecode_dir : $(apps_dir)/shefdecode # main directory location
+shefdecode_bin : $(shefdecode_dir)/bin # executable programs location
+shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location
+shef_data_dir : /data/fxa/ispan/hydro # input products location
+
+shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location
+shef_error_dir : $(shefdecode_dir)/logs/product # product log files location
+shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or
+ # only when errors occur (=IF_ERROR)
+shef_perflog : ON # ON/OFF - create a separate performance log file to
+ # save internal decoder timing messages for
+ # monitoring performance
+shef_data_log : ON # ON/OFF - include messages in the log file detailing
+ # the SHEF records
+dupmess : ON # ON/OFF - include messages in the log file about
+ # duplicate data
+elgmess : ON # ON/OFF - include messages in the log file about
+ # data types not found in IngestFilter or
+ # data types turned off in IngestFilter
+locmess : ON # ON/OFF - include messages in the log file about
+ # stations and areas not found in Location
+ # or GeoArea
+
+shef_sleep : 10 # sleep duration in seconds in between queries
+shef_winpast : 10 # number of days in past to post data
+shef_winfuture : 30 # number of minutes in future to post obs data
+shef_duplicate : IF_DIFFERENT # flag for handling duplicate data
+ # ALWAYS_OVERWRITE-always overwrite when value repeats
+ # USE_REVCODE-if revcode set overwrite duplicate value
+ # IF_DIFFERENT-overwrite if new value is different
+ # IF_DIFFERENT_OR_REVCODE-overwrite if new value is
+ # different or revcode is set
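Taken at face value, the four shef_duplicate settings reduce to a yes/no decision based on two facts about the incoming report: whether its value differs from the stored value and whether its revision code is set. The sketch below only restates the comment above as code; the class and method names are invented and the real shefdecode logic may differ.

/** Illustrative mapping of shef_duplicate token values to an overwrite decision. */
public final class DuplicatePolicySketch {

    public static boolean overwrite(String mode, boolean valueDiffers, boolean revcodeSet) {
        switch (mode) {
        case "ALWAYS_OVERWRITE":
            return true;                       // always overwrite when a value repeats
        case "USE_REVCODE":
            return revcodeSet;                 // overwrite only if the revision code is set
        case "IF_DIFFERENT":
            return valueDiffers;               // overwrite only if the new value is different
        case "IF_DIFFERENT_OR_REVCODE":
            return valueDiffers || revcodeSet; // either condition is enough
        default:
            throw new IllegalArgumentException("unknown shef_duplicate mode: " + mode);
        }
    }

    public static void main(String[] args) {
        // With the IF_DIFFERENT default above, an identical repeat is not overwritten
        // even when its revision code is set.
        System.out.println(overwrite("IF_DIFFERENT", false, true)); // false
    }
}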
+shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not
+ # with (station id-PEDTSE) combinations as they
+ # arrive in the input data flow
+shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages
+ # to the TextProduct table
+shef_post_unk : NONE # NONE - do not post to the UnkStn nor UnkStnValue tables
+ # IDS_ONLY - post only location identifiers for unknown
+ # stations to the UnkStn table
+ # IDS_AND_DATA - post all data from unknown stations to
+ # the UnkStnValue table
+shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross range
+ # check to the physical element data tables (=PE) OR
+ # to the RejectedData table (=REJECT)
+shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to
+ # the observation data tables (=ON) or to
+ # the ProcValue table (=OFF)
+shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table
+ # VALID_ONLY - post data to the LatestObsValue table
+ # ONLY if the gross range check is passed
+shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table
+shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast
+ # height or discharge data being posted, load
+ # the maximum forecast data into the RiverStatus table
+shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against
+ # alert and alarm thresholds
+# -- Intermediate output from ShefParser prior to post
+shef_out : OFF
+
+
+#===================== WHFS Applications Tokens ================================
+
+whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree
+whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree
+whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree
+whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree
+whfs_log_dir : $(whfs_local_data_dir)/log # top of WHFS logs tree
+
+whfs_local_bin_dir : $(whfs_local_dir)/bin # local WHFS executables
+
+whfs_geodata_dir : $(whfs_local_data_dir)/geo # WHFS map backgrounds
+whfs_image_dir : $(whfs_local_data_dir)/image # user-saved image files
+whfs_import_dir : $(whfs_local_data_dir)/import # files to import into WHFS
+whfs_product_dir : $(whfs_local_data_dir)/product # WHFS generated external products
+whfs_report_dir : $(whfs_local_data_dir)/report # user-saved text reports
+whfs_lines_per_page : 60
+
+whfs_config_dir : $(whfs_local_data_dir)/app # WHFS app configuration files
+rpf_template_dir : $(RPF_TEMPLATE_DIR) # RiverPro templates
+metar_config_dir : $(whfs_config_dir)/metar2shef # METAR translator config
+metar2shef_options : " -a -b -p1 -y2k -salias -p6 -p24 -round -w -strip "
+ts_config_dir : $(whfs_config_dir)/timeseries # Time Series config
+hv_config_dir : $(whfs_config_dir)/hydroview # Hydroview pixmaps etc.
+hv_help_dir : $(hv_config_dir)/help/ # Hydroview Help direc.
+rivermon_config_dir : $(whfs_config_dir)/rivermon/ # RiverMonitor Conf dir.
+
+whfs_misc_grid_dir : $(whfs_local_grid_dir)/misc # misc WHFS grids
+
+rgb_file_path : /usr/share/X11/rgb.txt # Location of X/Motif color file.
+
+rpf_log_dir : $(RPF_LOG_DIR) # RiverPro logs
+rivermon_log_dir : $(whfs_log_dir)/rivermon # RiverMonitor logs
+obsfcstmonitor_log_dir : $(whfs_log_dir)/obsfcst_monitor # ObsFcstMonitor logs
+whfs_util_log_dir : $(whfs_log_dir)/misc # WHFS misc logs
+precip_accum_log_dir : $(whfs_log_dir)/precip_accum # precip_accum logs
+floodseq_log_dir : $(whfs_log_dir)/floodseq # flood sequencer logs
+metar_log_dir : $(whfs_log_dir)/metar2shef # METAR translator logs
+hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator logs
+qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs
+
+db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token
+db_purge_backup_retention_use : ON # db_purge token for using backup retention value
+
+purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token
+
+whfs_bin_dir : $(whfs_base_dir)/bin # WHFS executables
+sws_parent_dir : $(whfs_bin_dir) # SWS parent dir
+sws_home_dir : $(whfs_bin_dir)/pa # SWS dir
+
+# -----------------------------------------------------------------
+# The Gage Precip Processor tokens
+# -----------------------------------------------------------------
+
+gage_pp_userid : oper # controlling UNIX user
+gage_pp_host : dx # controlling UNIX system
+gage_pp_data : $(pproc_local_data)/gpp_input # input data files location
+gage_pp_log : $(pproc_log)/gage_pp # daily log files location
+gage_pp_sleep : 10 # sleep duration in seconds in between queries
+gage_pp_enable : ON # gpp enabled; shef uses to determine post
+shef_post_precip : OFF # post to Precip/CurPrecip tables
+build_hourly_enable : ON # Enable the build_hourly application
+
+# ----------------------------------------------------------------
+# The following tokens are most likely to be customized by the user
+# (the first 4 MUST be customized at each site in the .Apps_defaults_site file)
+# ----------------------------------------------------------------
+hv_center_lat : 35.0 # HydroView center latitude
+hv_center_lon : -97.8 # HydroView center longitude
+hv_height_in_pixels : 900 # Hydroview map height in pixels
+hv_width_in_pixels : 1200 # Hydroview map width in pixels
+hv_map_width : 320 # HydroView map width (nautical miles)
+hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF)
+hv_hours_in_window : 4 # Change window hours
+hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out
+hv_disclosure_limit : 60 # Prog disclosure limit
+hv_zoom_threshold : 150 # nautical miles; Hydroview
+ # detail level for cities/towns
+hv_map_projection : FLAT # Sets default map projection used in
+ # hydroview/MPE. Options are FLAT, POLAR
+ # or HRAP.
+hv_refresh_minutes : 15 # HydroView auto refresh time (minutes)
+hv_riverbasis : maxobsfcst # initial river basis for river characteristics
+ # values either obs, fcst, maxobsfcst
+hv_min_dur_filled : 0.0 # Minimum percentage of accum interval covered
+ # by precip data.
+ppp_ppd_local_7am_window : 3 # Number of +/- hours around 7 AM local to
+ # use PPP and PPD reports for 24 hour
+ # precip summaries.
+shefencode_prodid : CCCCNNNXXX # product identifier for outgoing SHEF
+ # encoded messages from Hydro Time Series
+whfs_editor : whfs_editor # WHFS text editor
+rpf_linewidth : 80 # width of line in RiverPro generated products
+rpf_min_dur_filled : 0.25 # min percent time of requested precip dur in RiverPro
+office_prefix : K # fourth char prepended to 3-char office id
+vtec_record_stageoffset : 2.0 # ft offset from record value for H-VTEC field
+vtec_record_flowoffset : 5000.0 # cfs offset from record value for H-VTEC field
+pproc_s2_gridgen_hrs : 5 # WHFS Stage II lookback (hours)
+whfs_min_dur_filled : 0.83 # WHFS min fractional time duration needed for radar accumulations
+whfs_min_area_covered : 0.80 # WHFS min fractional area needed to compute MAPs
+whfs_printcommand_HP : lp # command used to print WHFS apps reports on HP
+whfs_printcommand_LX : lp # command used to print WHFS apps reports
+ # on LX
+whfs_e19_print_command : "lp -o cpi=19 -o lpi=7" # command used to print e19 text reports
+
+dam_icon_color : BROWN # Color used for dam icon in Hydroview
+timeseries_begintime : 5 # number of days back relative to current time
+timeseries_endtime : 3 # number of days ahead relative to current time
+timeseries_showcat : 2 # scale by data and show categories
+timeseries_linewidth : 1 # width of line drawn on graph
+timeseries_mode : STATION # set to GROUP or STATION mode
+timeseries_dist_shef : OFF # ON/OFF token for the shef send script distribute check box
+ # Defaults to off if not set
+rpf_stage_window : 0.5 # set stage window for determining the trend
+ # variables in RiverPro
+show_vtecqc_window : IF_ERROR #or ALWAYS, used in RiverPro
+rpf_endtime_shifthrs : 6 # in RiverPro
+event_expire_withinhr : 3 # in RiverPro
+
+#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP=====
+# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office)
+
+gaff_execution : ON # ON/OFF token for the gen_areal_ffg process
+ # the gen_areal_ffg process is run from the
+ # process_dpa_files script at WFOs
+gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked
+ # list is comma separated, no embedded
+ # spaces are allowed
+gaff_input_dir : $(EDEX_HOME)/data/processing
+ # directory containing gridded FFG
+ # generated by RFCs
+gaff_look_back_limit : 60 # number of hours to look back for valid gridded
+ # FFG data for input
+gaff_mosaic_dir : $(whfs_misc_grid_dir) # directory containing output
+ # mosaicked gridded FFG in
+ # netCDF format
+gaff_durations : 1,3,6 # FFG durations in hours
+ # list is comma separated, no embedded
+ # spaces are allowed
+
+
+# ================= "ds_" system tokens (see more in site file) ===============
+
+ofs_dir : $(apps_dir)/rfc/nwsrfs/ofs
+util_dir : $(apps_dir)/rfc/nwsrfs/util
+calb_dir : $(apps_dir)/rfc/nwsrfs/calb
+ifp_dir : $(apps_dir)/rfc/nwsrfs/ifp
+icp_dir : $(apps_dir)/rfc/nwsrfs/icp
+ens_dir : $(apps_dir)/rfc/nwsrfs/ens
+fld_dir : $(apps_dir)/rfc/fld
+
+
+hdb_dir : $(apps_dir)/rfc/hdb
+
+# = = = = = = = = = = = = = = = = = = = = = = end "ds_" system requirements = =
+
+ofs_rls : $(ofs_dir)/bin/RELEASE
+util_rls : $(util_dir)/bin/RELEASE
+calb_rls : $(calb_dir)/bin/RELEASE
+ffg_rls : $(ffg_dir)/bin/RELEASE
+ifp_rls : $(ifp_dir)/bin/RELEASE
+icp_rls : $(icp_dir)/bin/RELEASE
+ens_rls : $(ens_dir)/bin/RELEASE
+hdb_rls : $(hdb_dir)/bin/RELEASE
+fld_rls : $(fld_dir)/bin/RELEASE
+xsets_rls : $(xsets_dir)/bin/RELEASE
+xnav_rls : $(xnav_dir)/bin/RELEASE
+xdat_rls : $(xdat_dir)/bin/RELEASE
+
+ofs_arc : $(ofs_dir)/bin/ARCHIVE
+util_arc : $(util_dir)/bin/ARCHIVE
+calb_arc : $(calb_dir)/bin/ARCHIVE
+ffg_arc : $(ffg_dir)/bin/ARCHIVE
+ifp_arc : $(ifp_dir)/bin/ARCHIVE
+icp_arc : $(icp_dir)/bin/ARCHIVE
+ens_arc : $(ens_dir)/bin/ARCHIVE
+hdb_arc : $(hdb_dir)/bin/ARCHIVE
+fld_arc : $(fld_dir)/bin/ARCHIVE
+xsets_arc : $(xsets_dir)/bin/ARCHIVE
+xnav_arc : $(xnav_dir)/bin/ARCHIVE
+xdat_arc : $(xdat_dir)/bin/ARCHIVE
+# = = = = = = = = = = = = = = = = = = = = = = end of other "ds_" tokens = = = =
+
+# LDAD shefencode tokens
+ldad_data_dir : /awips/ldad/data # the LDAD internal data dir
+shefenc_pe_table : $(ldad_data_dir)/ShefEncoder_PE.tbl
+shefenc_units_table : $(ldad_data_dir)/ShefEncoder_Units.tbl
+
+# NWSRFS tokens
+
+rfs_dir : $(apps_dir)/rfc/nwsrfs # Top-level rfs mt.
+rfs_sys_dir : $(rfs_dir)/sys_files # RFS system files
+rfs_doc : $(rfs_dir)/doc # NWSRFS documentation
+
+# OFS tokens
+locks_dir : $(rfs_dir)/locks
+ofs_lock_max_wait : 60 # no. of mins to wait to get an ofs lock
+ofs_lock_wait_interval : 5 # no. of secs between retries to get an ofs lock
+ofs_locks_max_pass : 4 # no. of attempts to make to get a set of locks.
+
+ofs_level : oper
+ofs_reor_lvl : oper_new
+ofs_inpt_grp : oper
+
+home_files_workstation : ds
+
+ofs_log_output : off # whether to output file r/w info
+ofs_error_output : on # whether to output file error info
+fortran_stderr : 7 # FORTRAN standard error unit
+
+ofs_bin : $(ofs_dir)/bin # OFS executables dir
+ofs_files : $(ofs_dir)/files # OFS file group
+ofs_fs5files : $(ofs_files)/$(ofs_level)/fs5files # OFS files dir
+ofs_reorder_dir : $(ofs_files)/$(ofs_reor_lvl)/fs5files # OFS reordered files
+ofs_output : $(ofs_dir)/output # OFS output dir
+ofs_input : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
+ofs_input_dflt : $(ofs_dir)/input/$(ofs_inpt_grp) # OFS input dir
+ofs_shefdata_dir: $(ofs_files)/$(ofs_level)/shefdata # OFS SHEF data dir
+ofs_shefout_dir : $(ofs_files)/$(ofs_level)/shefdata # OFS shefout file dir
+ofs_mods_dir : $(ofs_files)/$(ofs_level)/mods # OFS MODS files dir
+ofs_griddb_dir : $(ofs_files)/$(ofs_level)/griddb # OFS gridded fields
+ofs_scripts : $(ofs_dir)/scripts # OFS scripts dir
+ofs_server : apwk01g2 # OFS "slave" server
+my_output : $(ofs_output)/$(LOGNAME) # users ofs output files
+
+ndfd2rfs_input : $(ofs_files)/$(ofs_level)/ndfd
+ndfd2rfs_output : $(my_output)
+ndfd2rfs_log_level : 0
+
+fldview_dir : $(apps_dir)/rfc/fldview/floodmapdata
+
+# calb tokens
+calb_bin : $(calb_dir)/bin
+calb_lib : $(calb_dir)/lib
+
+calb_data_grp : oper
+calb_inpt_grp : oper
+calb_input : $(calb_dir)/input/$(calb_inpt_grp)
+calb_output : $(calb_dir)/output
+calb_sta_ts_dir : $(calb_dir)/data/sta_ts/$(calb_data_grp)
+calb_area_ts_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
+peakflow_data_dir : $(calb_dir)/data/area_ts/$(calb_data_grp)
+
+calb_gzio_read : off # whether or not to read gzipped DATACARD files
+calb_gzio_write : off # whether or not to write gzipped DATACARD files
+
+nwsrfs_calbfile_default : CARD # default calibration file type
+nwsrfs_platform : AIX # operating system
+
+# ICP tokens
+icp_bin : $(icp_dir)/bin
+icp_pw : hILLEL
+icp_scripts : $(icp_dir)/scripts
+
+mcp_decks : $(calb_input)/mcp3
+mcp_dir : $(calb_rls)
+
+# IFP tokens
+ifp_help_dir : $(ifp_dir)/help_files # IFP help files
+ifp_bin_dir : $(ifp_dir)/bin/RELEASE # IFP bin files - ref in code
+ifp_nwsrfs_bin_dir : $(ifp_dir)/bin/RELEASE # ifp_nwsrfs bin - ref in code
+ifp_sys_dir : $(ifp_dir)/system # IFP system files
+ifp_scripts_dir : $(ifp_dir)/scripts # IFP script files
+ifp_options_dir : $(ifp_dir)/options # IFP options files
+ifp_colors_dir : $(ifp_options_dir)/colors # IFP color files
+ifp_fs5files : $(HOME)/ofs_ifp/fs5files # user copy of fs5files
+ifp_rfc : host # name of RFC to run
+ifp_num_columns : 3 # number of columns to display
+ifp_gif_files : $(ofs_files)/$(ofs_level)/gif_files # gif files directory
+ifp_sacco_dir : $(ofs_files)/$(ofs_level)/sacsnow_clim
+ifp_dhm_data_dir : /data/dhm/$(LOGNAME)
+ifp_griddb_dir : $(ifp_dhm_data_dir)/precip
+
+# Ensemble (ens) tokens
+
+espts_dir : $(ens_dir)/files/$(ofs_level)/espts #espts files esp
+espadp_dir : $(ens_dir)
+preadj_dir : $(ens_dir)/files/$(ofs_level)/cpc_fcsts
+ens_input : $(ens_dir)/input/$(ofs_level)
+ens_output : $(ens_dir)/output
+ens_files : $(ens_dir)/files/$(ofs_level)
+ens_scripts : $(ens_dir)/scripts
+
+# ens_pre tokens
+##FXA_HOME : /px1data #taken out by kwz.2/11/04
+enspre_griddb : $(FXA_DATA)/Grid/SBN/netCDF/CONUS211/CPCoutlook
+ens_log_dir : $(ens_output)/$(ofs_level)
+ens_msglog_level : 5
+preadj_outts_dir : $(calb_area_ts_dir)/pre
+
+# FLDGRF tokens (added 6 April 2000)
+
+fldgrf_iface : $(HOME)/fldgrf
+
+# ofsde tokens
+
+ofsde_log_dir : $(ofs_output)/ofsde_logs # ofsde log dir
+ # (formerly ofsde_output_dir)
+ofsde_ndate : 7 # number of days to search for forecast temps
+ofsde_rrstime_check : OFF # flag to check obs times of RRS data
+ # against window around 12Z (OFF/ON)
+
+# intervals for max/min temperatures (used by ofsde)
+# these represent number of hours around 12z
+
+intlrmn : 8
+inturmn : 2
+intlrzn : 2
+inturzn : 2
+intlrzx : 8
+inturzx : 2
+siipp_calc_624_PP : OFF # flag for calculating 6hr and 24hr
+ # PP data from PC data
+ # if running RFCWide, should be set to OFF
+
+# defaults for geographic data
+
+geo_data : $(apps_dir)/geo_data
+geo_util : $(geo_data)/util
+
+geo_ifp_bin : $(geo_data)/$(ifp_rfc)/binary
+geo_ifp_ascii : $(geo_data)/$(ifp_rfc)/ascii
+
+#===================== PRECIP_PROC Application Tokens ========================
+
+# precip_proc directory
+
+pproc_dir : $(apps_dir)/precip_proc # precip proc top
+ # level dir
+pproc_bin : $(pproc_dir)/bin # dir with precip proc exes
+pproc_local : $(pproc_dir)/local # dir with local items, esp. data
+pproc_local_data : $(pproc_local)/data # dir with local data
+pproc_local_bin : $(pproc_local)/bin # dir with local bin
+pproc_log : $(pproc_local_data)/log # dir with local logs
+
+pproc_util_log_dir : $(pproc_log)/misc # miscellaneous logs
+
+# DecodeDPA tokens (formerly DecodeHDP tokens that looked like hdp_*)
+
+dpa_log_dir : $(pproc_log)/decodedpa # DPA Decoder logs
+dpa_prod_dir : /data/fxa/ispan/hdp # DPA input directory
+dpa_gather : $(pproc_local_data)/dpa_gather # DPA gather directory
+dpa_error_dir : $(pproc_local_data)/stage1_error # DPA error files
+dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives
+dpa_wind : 10
+
+
+dpa_filter_decode : ON # flag for non-top-of-hour
+ # filtering of decoded products
+ # ON - filter products for decode
+ # OFF - do not filter (ie decode all products)
+
+dpa_decode_window : 10 # number of minutes around top
+ # of hour for filtering products for
+ # decoding
+
+dpa_archive : OFF # ON/OFF flag for archiving products
+ # OFF - do not archive products
+ # ON - archive products and filter based
+ # on value of dpa_archive_window
+
+dpa_archive_window : 10 # number of minutes around top
+ # of hour for filtering products for archiving
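Both dpa_decode_window and dpa_archive_window describe the same kind of test: accept a product only if its time stamp falls within the given number of minutes of a top of hour. A minimal sketch of that test follows; the class name is invented and java.time is used purely for illustration, this is not the decoder's code.

import java.time.LocalDateTime;

/** Illustrative top-of-hour window test for dpa_decode_window / dpa_archive_window. */
public final class TopOfHourWindowSketch {

    /** True if productTime is within windowMinutes of the nearest top of hour. */
    public static boolean withinWindow(LocalDateTime productTime, int windowMinutes) {
        int minute = productTime.getMinute();
        int sincePreviousTop = minute;      // minutes past the previous top of hour
        int untilNextTop = 60 - minute;     // minutes until the next top of hour
        return Math.min(sincePreviousTop, untilNextTop) <= windowMinutes;
    }

    public static void main(String[] args) {
        // With a 10 minute window, hh:08 and hh:53 pass the filter, hh:25 does not.
        System.out.println(withinWindow(LocalDateTime.of(2013, 9, 3, 12, 8), 10));  // true
        System.out.println(withinWindow(LocalDateTime.of(2013, 9, 3, 12, 53), 10)); // true
        System.out.println(withinWindow(LocalDateTime.of(2013, 9, 3, 12, 25), 10)); // false
    }
}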
+
+dpa_dirname1 : $(data_archive_root)/radar # first part of directory name
+ # containing DPA products for
+ # associated or dial in radars
+dpa_dirname2 : DPA/layer0/res4/level256 # second part of directory name
+ # containing DPA products for
+ # associated or dial in radars
+dpa_grid_dir : $(pproc_local_data)/stage1_decoded # decoded DPA radar grids
+
+# siipp tokens
+
+intpc : 10 # interval (minutes) around top of hour for using PC data
+intlppp : 2
+intuppp : 2
+intppq : 2
+siipp_log_dir : $(pproc_log)/siipp # Stage II preprocessor logs
+ # (formerly siipp_output_dir)
+
+# tokens for stageiii
+st3_help : $(pproc_local_data)/app/stage3/help # online help text
+
+st3_rfc : host
+awips_rfc_id : TUA # 3 char AWIPS RFC identifier
+ # must be all upper case
+
+# tokens for stageiii output
+st3_mapx_id : xmrg # identifier for Stage 3 output
+st3_date_form : mdY # date format
+ # current allowable = Ymd or mdY
+ # similar to formatting codes for
+ # strftime function
+
+st3_output : $(ofs_griddb_dir) # dir for xmrg files for MAPX
+ # ofs_griddb_dir defined outside of pproc
+st3_out_dir : $(pproc_local_data)/stage3
+post_output : $(st3_out_dir)/post_analysis
+
+# defaults for netCDF output
+
+st3_netcdf_loc : arkansas_red_basin_river_forecast_center_tulsa_ok
+ # underscores needed between words
+st3_netcdf_swlat : 33.603
+st3_netcdf_swlon : 106.456
+st3_netcdf_selat : 32.433
+st3_netcdf_selon : 92.322
+st3_netcdf_nelat : 38.027
+st3_netcdf_nelon : 90.678
+st3_netcdf_nwlat : 39.420
+st3_netcdf_nwlon : 106.652
+
+#defaults for auto stageiii
+st3_auto_graphic_scale : 2.4 # used by gif file generation
+
+#===================== disagg Tokens (old disagg process)========================
+
+disagg_msglog_level : 30 # message level
+ # possible values are 1,10,20,30,...80
+ # lower values signify less info in log
+
+disagg_dur : 24 # maximum duration of precip gage data to
+ # be disaggregated
+ # possible values = 2,3,...,24
+
+disagg_look_back : 0 # time (hours) to look back from current hour
+ # for precip gage data to be disaggregated
+
+disagg_radius : 3 # number of HRAP bins within which the QPE
+ # will be averaged for disagg
+ # for example, if disagg_radius = 3, then
+ # the 9 nearest neighbor QPE bin values
+ # will be averaged
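Reading the example in the comment literally (a value of 3 averages the 9 nearest bins), disagg_radius acts as the side length of a square neighborhood of HRAP bins centered on the target bin. That interpretation, like the class and method names below, is an assumption made only for this sketch.

/** Illustrative neighborhood average for disagg_radius, read as a box side length. */
public final class DisaggNeighborhoodSketch {

    /** Average the radius-by-radius block of QPE bins centered on (row, col). */
    public static double neighborhoodMean(double[][] qpe, int row, int col, int radius) {
        int half = radius / 2; // radius 3 -> half 1 -> a 3x3 block of 9 bins, as in the comment
        double sum = 0.0;
        int count = 0;
        for (int r = row - half; r <= row + half; r++) {
            for (int c = col - half; c <= col + half; c++) {
                if (r >= 0 && r < qpe.length && c >= 0 && c < qpe[r].length) {
                    sum += qpe[r][c];
                    count++;
                }
            }
        }
        return count == 0 ? 0.0 : sum / count;
    }

    public static void main(String[] args) {
        double[][] qpe = {
            { 1.0, 2.0, 3.0 },
            { 4.0, 5.0, 6.0 },
            { 7.0, 8.0, 9.0 }
        };
        // Centered on the middle bin with disagg_radius = 3, all nine values average to 5.0.
        System.out.println(neighborhoodMean(qpe, 1, 1, 3));
    }
}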
+disagg_set_date : 0 # identifier for current date (yyyymmdd).
+ # Default value is 0 - set to
+ # today's date
+
+disagg_set_hour : 0 # identifier for current hour (hh).
+ # Default value is 0
+ # Possible values = 0,1,2,3,...,23
+
+disagg_log_dir : $(pproc_log)/disagg # directory containing disagg logs
+
+# =============== Multi-Sensor Precipitation Estimator (MPE) ================
+
+rfcw_rfcname : host
+rfcwide_logs_dir : $(pproc_log)/mpe_fieldgen
+hmap_mpe_timelapse : 1000 # time between images, in milliseconds, for the MPE
+ # time lapse display
+
+### tokens for input ###
+
+rfcwide_input_dir : $(pproc_local_data)/app/mpe
+
+rfcwide_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+
+# the help_dir token needs a trailing slash because it is required by
+# the RFC software that processes the help info...
+
+rfcwide_help_dir : $(rfcwide_input_dir)/help/
+rfcwide_misbin_dir : $(rfcwide_input_dir)/misbin
+rfcwide_prism_dir : $(rfcwide_input_dir)/prism
+rfcwide_gageloc_dir : $(rfcwide_input_dir)/gage_locations
+rfcwide_beamheight_dir : $(rfcwide_input_dir)/beam_height
+rfcwide_utiltriangles_dir : $(rfcwide_input_dir)/utiltriangles
+
+### tokens for output ###
+### NOTE: xmrg files are stored in dir defined by rfcwide_xmrg_dir token below
+
+rfcwide_output_dir : $(pproc_local_data)/mpe # fka ofs_griddb_dir defined outside of pproc
+
+rfcwide_gagetriangles_dir : $(rfcwide_output_dir)/gagetriangles
+rfcwide_drawpre_dir : $(rfcwide_output_dir)/draw_precip
+
+rfcwide_avg_rmosaic_dir : $(rfcwide_output_dir)/avgrmosaic
+rfcwide_max_rmosaic_dir : $(rfcwide_output_dir)/maxrmosaic
+rfcwide_rmosaic_dir : $(rfcwide_output_dir)/rmosaic
+rfcwide_bmosaic_dir : $(rfcwide_output_dir)/bmosaic
+rfcwide_mmosaic_dir : $(rfcwide_output_dir)/mmosaic
+rfcwide_mlmosaic_dir : $(rfcwide_output_dir)/mlmosaic
+rfcwide_lmosaic_dir : $(rfcwide_output_dir)/lmosaic
+rfcwide_lsatpre_dir : $(rfcwide_output_dir)/lsatpre
+rfcwide_gageonly_dir : $(rfcwide_output_dir)/gageonly
+
+rfcwide_height_dir : $(rfcwide_output_dir)/height
+rfcwide_index_dir : $(rfcwide_output_dir)/index
+rfcwide_locbias_dir : $(rfcwide_output_dir)/locbias
+rfcwide_locspan_dir : $(rfcwide_output_dir)/locspan
+rfcwide_p3lmosaic_dir : $(rfcwide_output_dir)/p3lmosaic
+
+rfcwide_xmrg_dir : $(rfcwide_output_dir)/qpe
+rfcwide_statevar_dir : $(rfcwide_output_dir)/state_var
+rfcwide_sat_statevar_dir : $(rfcwide_output_dir)/sat_state_var
+mpe_q2_statevar_dir : $(rfcwide_output_dir)/q2_state_var
+
+# ==================== MPE Tokens ===============================
+
+#daily QC options token; defaults to 'off', which grays out the daily QC options; values are 'on' and 'off'
+mpe_dqc_options : off
+mpe_map_background_color : GRAY20 # The default color of the MPE map background
+mpe_temperature_window : 60 # The window in minutes the dqc preprocessor
+ # searches around a synoptic time
+ # (00z,06z,12z,18z) for temperature data.
+mpe_maxminT_hour_window : 2
+mpe_dqc_max_precip_neighbors : 30
+mpe_dqc_max_temp_neighbors : 20
+mpe_dqc_precip_deviation : 3.0
+mpe_dqc_temperature_deviation : 10.0
+mpe_dqc_min_good_stations : 5
+mpe_copy_level2_dqc_to_ihfs_shef : OFF
+mpe_copy_level2_dqc_to_archive_shef : OFF
+mpe_dqc_num_days : 10
+mpe_dqc_warningpopup : on
+mpe_dqc_6hr_24hr_set_bad : OFF # Define logic if user sets a 6hr value to Bad in the
+ # Edit Precip Stations window.
+ # OFF - if user sets 6hr value to Bad; 24hr value unaffected
+ # ON - if user sets 6hr value to Bad; 24hr value set to Bad
+ # Added at request of MBRFC to help with QC of SNOTEL.
+
+mpe_dqc_grid_max_dist : 70 # Max distance (units of grid bins) between a grid bin and a
+ # station to use the station to estimate the value at the grid bin.
+
+mpe_dqc_output_qc_file : OFF # ON/OFF default = OFF
+
+mpe_dqc_execute_internal_script : OFF # ON/OFF
+
+mpe_dqc_24hr_precip_grid_meth : USE_24HR # valid token values are ACCUM_6HR and USE_24HR
+mpe_td_new_algorithm : OFF # ON/OFF flag for the new algorithm used to calculate the Time Distributed
+ # estimate; the default is OFF
+mpe_dqc_gridtype : SCALAR
+mpe_dqc_projectiontype : POLAR_STEREOGRAPHIC
+mpe_dqc_lonorigin : -105.
+
+#daily qc preprocessor tokens
+dqc_preprocessor_basetime : 12Z #The value can be 12Z, 18Z, 00Z, or 06Z
+
+### MPE base directory tokens.
+mpe_dir : $(pproc_local_data)/mpe
+mpe_gageqc_dir : $(mpe_dir)/dailyQC
+mpe_scratch_dir : $(mpe_gageqc_dir)/scratch
+mpe_app_dir : $(pproc_local_data)/app/mpe
+mpe_fieldgen_product_dir : $(mpe_dir)
+
+### MPE station list tokens
+mpe_station_list_dir : $(mpe_app_dir)/station_lists
+mpe_site_id : ounx
+mpe_area_names : $(mpe_site_id)
+
+### MPE static data files
+mpe_prism_dir : $(mpe_app_dir)/prism
+mpe_misbin_dir : $(mpe_app_dir)/misbin
+mpe_utiltriangles_dir : $(mpe_app_dir)/utiltriangles
+mpe_beamheight_dir : $(mpe_app_dir)/beam_height
+mpe_climo_dir : $(mpe_app_dir)/climo
+mpe_help_dir : $(mpe_app_dir)/help
+mpe_gridmask_dir : $(mpe_app_dir)/grid_masks
+mpe_basin_file : $(whfs_geodata_dir)/basins.dat
+
+### MPE precipitation gage qc directories
+mpe_precip_data_dir : $(mpe_gageqc_dir)/precip
+mpe_bad_precip_dir : $(mpe_precip_data_dir)/bad
+mpe_dev_precip_dir : $(mpe_precip_data_dir)/dev
+mpe_map_dir : $(mpe_precip_data_dir)/MAP
+mpe_grid_precip_dir : $(mpe_precip_data_dir)/grid
+mpe_point_precip_dir : $(mpe_precip_data_dir)/point
+
+### MPE temperature gage qc directories
+mpe_temperature_data_dir : $(mpe_gageqc_dir)/temperature
+mpe_bad_temperature_dir : $(mpe_temperature_data_dir)/bad
+mpe_dev_temperature_dir : $(mpe_temperature_data_dir)/dev
+mpe_mat_dir : $(mpe_temperature_data_dir)/MAT
+mpe_grid_temperature_dir : $(mpe_temperature_data_dir)/grid
+mpe_point_temperature_dir : $(mpe_temperature_data_dir)/point
+
+### MPE freezing level gage qc directories
+mpe_freezing_data_dir : $(mpe_gageqc_dir)/freezing_level
+mpe_maz_dir : $(mpe_freezing_data_dir)/MAZ
+mpe_grid_freezing_dir : $(mpe_freezing_data_dir)/grid
+mpe_point_freezing_dir : $(mpe_freezing_data_dir)/point
+ruc_model_data_dir : /data/fxa/Grid/SBN/netCDF/CONUS211/RUC
+
+### MPE 1 hour mosaics and fields and supporting reference fields.
+mpe_avgrmosaic_dir : $(mpe_fieldgen_product_dir)/avgrmosaic
+mpe_maxrmosaic_dir : $(mpe_fieldgen_product_dir)/maxrmosaic
+mpe_bmosaic_dir : $(mpe_fieldgen_product_dir)/bmosaic
+mpe_d2d_files_dir : $(mpe_fieldgen_product_dir)/d2d_files
+mpe_polygon_dir : $(mpe_fieldgen_product_dir)/edit_polygon
+mpe_gageonly_dir : $(mpe_fieldgen_product_dir)/gageonly
+mpe_gagetriangles_dir : $(mpe_fieldgen_product_dir)/gagetriangles
+mpe_height_dir : $(mpe_fieldgen_product_dir)/height
+mpe_index_dir : $(mpe_fieldgen_product_dir)/index
+mpe_lmosaic_dir : $(mpe_fieldgen_product_dir)/lmosaic
+mpe_locbias_dir : $(mpe_fieldgen_product_dir)/locbias
+mpe_locspan_dir : $(mpe_fieldgen_product_dir)/locspan
+mpe_lsatpre_dir : $(mpe_fieldgen_product_dir)/lsatpre
+mpe_mlmosaic_dir : $(mpe_fieldgen_product_dir)/mlmosaic
+mpe_mmosaic_dir : $(mpe_fieldgen_product_dir)/mmosaic
+mpe_qmosaic_dir : $(mpe_fieldgen_product_dir)/qmosaic
+mpe_lqmosaic_dir : $(mpe_fieldgen_product_dir)/lqmosaic
+mpe_mlqmosaic_dir : $(mpe_fieldgen_product_dir)/mlqmosaic
+mpe_p3lmosaic_dir : $(mpe_fieldgen_product_dir)/p3lmosaic
+mpe_qpe_dir : $(mpe_fieldgen_product_dir)/qpe
+mpe_qpe_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_sbn
+mpe_qpe_gif_dir : $(mpe_fieldgen_product_dir)/qpe_gif
+mpe_qpe_grib_dir : $(mpe_fieldgen_product_dir)/qpe_grib
+mpe_qpe_grib_sbn_dir : $(mpe_fieldgen_product_dir)/qpe_grib_sbn
+mpe_qpe_jpeg_dir : $(mpe_fieldgen_product_dir)/qpe_jpeg
+mpe_qpe_netcdf_dir : $(mpe_fieldgen_product_dir)/qpe_netcdf
+mpe_rmosaic_dir : $(mpe_fieldgen_product_dir)/rmosaic
+mpe_sat_state_var : $(mpe_fieldgen_product_dir)/sat_state_var
+mpe_state_var : $(mpe_fieldgen_product_dir)/state_var
+mpe_srmosaic_dir : $(mpe_fieldgen_product_dir)/srmosaic
+mpe_sgmosaic_dir : $(mpe_fieldgen_product_dir)/sgmosaic
+mpe_srgmosaic_dir : $(mpe_fieldgen_product_dir)/srgmosaic
+mpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+mpe_rfcmmosaic_dir : $(mpe_fieldgen_product_dir)/rfcmmosaic
+mpe_rfcbmosaic_dir : $(mpe_fieldgen_product_dir)/rfcbmosaic
+mpe_localfield1_dir : $(mpe_fieldgen_product_dir)/localfield1
+mpe_localfield2_dir : $(mpe_fieldgen_product_dir)/localfield2
+mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3
+
+### Tokens related to the MPE Editor map display.
+mpe_config_dir : $(whfs_config_dir)
+mpe_center_lat : 39.8
+mpe_center_lon : -98.55
+mpe_height_in_pixels : 900
+mpe_width_in_pixels : 1200
+mpe_map_width : 1320
+mpe_zoom_out_limit : 20
+mpe_disclosure_limit : 60
+mpe_map_projection : FLAT
+
+### Misc tokens
+mpe_load_hourlypc : ON
+mpe_gageqc_gif_dir : $(whfs_image_dir)
+mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0
+mpe_overlay_dir : $(whfs_geodata_dir)
+mpe_editor_logs_dir : $(pproc_log)/mpe_editor
+mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP
+
+### New tokens for DQC/CHPS
+mpe_level2_type_value : 2 # Allow user to customize the type value. The default is "2"
+mpe_td_details_set : OFF # Allow generating a time distribution details file.
+mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF"
+mpe_map_one_zone : OFF # Allow MAP generation for one zone only
+fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir
+nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file
+netcdf_dir : $(mpe_gageqc_dir)/netcdf_files #default output directory for netcdf files
+mpe_dqc_save_netcdf : OFF # Save Daily QC as netCDF
+mpe_dqc_save_grib : OFF # Save Daily QC as grib
+
+### Tokens which control the products generated by MPE Fieldgen.
+mpe_locbias_1hr_rerun : OFF # ON/OFF flag to
+ # determine if local bias should be
+ # recalculated as part of the mpe_fieldgen
+ # rerun from hmap_mpe
+ # ON -- recalc loc bias on rerun
+ # OFF -- do not recalc loc bias on rerun
+mpe_del_gage_zeros : OFF # ON/OFF flag to determine if a zero gage
+ # value should be removed from consideration
+ # if the radar shows > 0.0
+ # ON -- check for and remove zero gage values
+ # OFF -- do not check for or remove zero
+ # gage values
+
+mpe_selected_grid_gagediff : MMOSAIC
+
+mpe_qpe_fieldtype : MMOSAIC # field type to be saved as qpe
+mpe_generate_list : BMOSAIC,GAGEONLY,LMOSAIC,LSATPRE,MLMOSAIC,MMOSAIC,RMOSAIC,SATPRE,P3LMOSAIC,SRMOSAIC,SGMOSAIC,QMOSAIC,LQMOSAIC,MLQMOSAIC,RFCBMOSAIC,RFCMMOSAIC,RFCMOSAIC,SAVELEVEL2
+mpe_base_radar_mosaic : RMOSAIC # The base radar mosaic used for the fields
+ # that mpe_fieldgen generates
+mpe_show_missing_gage : None # MPE missing gage display.
+ # (None,All,Reported)
+mpe_bad_gages_dir : $(rfcwide_output_dir)/bad_gages
+
+### directory locations of various format MPE output grid files
+mpe_gif_dir : $(rfcwide_output_dir)/qpe_gif
+mpe_jpeg_dir : $(rfcwide_output_dir)/qpe_jpeg
+mpe_netcdf_dir : $(rfcwide_output_dir)/qpe_netcdf
+mpe_grib_dir : $(rfcwide_output_dir)/qpe_grib
+
+### which format MPE output grid files to save
+mpe_save_gif : nosave
+mpe_save_jpeg : nosave
+mpe_save_netcdf : nosave
+mpe_save_grib : save
+
+### prefixes for various format MPE output grid files, blank by default
+mpe_gif_id :
+mpe_jpeg_id :
+mpe_netcdf_id :
+mpe_grib_id :
+
+### mpe gage QC tokens
+mpe_gage_qc : ON
+mpe_sccqc_threshold : 2.0
+mpe_scc_boxes_failed : 4
+mpe_msc_precip_limit : 1.0
+mpe_split_screen : OFF
+
+### mpe polygon tokens
+mpe_polygon_action_order : None
+mpe_polygon_field_order : None
+
+### tokens which control the transmission of RFC bias data.
+mpe_transmit_bias : OFF
+transmit_bias_on_save : NO
+transmit_bias_on_rerun : NO
+rfc_bias_input_dir : $(mpe_dir)/bias_message_input
+rfc_bias_output_dir : $(mpe_dir)/bias_message_output
+process_bias_log_dir : $(pproc_log)/process_bias_message
+send_local_bias_when_rfc_bias_missing : NO
+
+### rfc qpe to wfo tokens
+mpe_send_qpe_to_sbn : OFF
+mpe_generate_areal_qpe : OFF
+# List of RFCs to process for Gen Areal Qpe
+gaq_rfc_list : MBRFC,NCRFC
+gaq_dur_list : 1,6,24
+gaq_app_dir : $(pproc_local_data)/app/gen_areal_qpe
+gaq_input_dir : /data/fxa/Grid/SBN/netCDF/HRAP/QPE
+gaq_log_dir : $(pproc_log)/gen_areal_qpe
+gaq_rfc_mask_dir : $(gaq_app_dir)
+gaq_temp_xmrg_dir : $(rfcwide_output_dir)/rfcqpe_temp
+gaq_xmrg_1hr_dir : $(rfcwide_output_dir)/rfcqpe01
+gaq_xmrg_6hr_dir : $(rfcwide_output_dir)/rfcqpe06
+gaq_xmrg_24hr_dir : $(rfcwide_output_dir)/rfcqpe24
+gaq_grib_dir : $(rfcwide_output_dir)/rfcqpe_grib
+
+### token which controls how PC precipitation totals are derived.
+sum_pc_reports : NO
+
+geo_st3_bin : $(geo_data)/$(st3_rfc)/binary #geo_data defined outside of pproc
+geo_st3_ascii : $(geo_data)/$(st3_rfc)/ascii
+adjust_PC_startingtime : 4 #allow PC starting time tolerance
+
+### tokens for sending MPE mean field bias data to the ORPG
+
+bias_message_dir : $(apps_dir)/data/fxa/radar/envData
+
+### tokens for Lightning Data processing
+
+lightning_input_dir : /data/fxa/point/binLightning/netcdf
+
+lightning_log_dir : $(pproc_log)/lightning_proc
+
+### tokens for D2D display
+
+mpe_d2d_display_grib : ON # ON/OFF token to determine if further
+ # processing of grib file for D2D display
+ # is required
+
+d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files
+ # to be processed for D2D display
+
+mpe_send_grib : OFF # ON/OFF token to determine if grib file is
+ # to be sent to other sites such as NPVU
+
+# disagg processing tokens
+
+mpe_disagg_execute : OFF
+mpe_disagg_method : POINT
+mpe_disagg_6hreq_0 : 1
+mpe_disagg_6hrgt_0 : 1
+
+#====== High-resolution Precipitation Estimator (HPE) tokens====================
+
+# DecodeDHR tokens (formerly DecodeHDP tokens that looked like hdp_*)
+
+dhr_log_dir : $(pproc_log)/decodedhr # DHR Decoder logs
+
+dhr_prod_dir : $(pproc_local_data)/dhr_gather # DHR input directory
+
+dhr_dirname1 : $(data_archive_root)/radar # first part of directory name
+ # containing DHR products for
+ # associated or dial in radars
+
+dhr_dirname2 : DHR/layer0/res1/level256 # second part of directory name
+ # containing DHR products for
+ # associated or dial in radars
+dhr_grid_dir : $(pproc_local_data)/dhr_decoded # decoded DHR radar grids
+
+dhr_error_dir : $(pproc_local_data)/dhr_error # DHR error files
+dhr_arch_dir : $(pproc_local_data)/dhr_archive # DHR archives
+
+# DecodeDSP tokens (formerly DecodeHDP tokens that looked like hdp_*)
+
+dsp_log_dir : $(pproc_log)/decodedsp # DSP Decoder logs
+
+dsp_prod_dir : $(pproc_local_data)/dsp_gather # DSP input directory
+
+dsp_dirname1 : $(data_archive_root)/radar # first part of directory name
+ # containing DSP products for
+ # associated or dial in radars
+
+dsp_dirname2 : STP/layer0/res2/level256 # second part of directory name
+ # containing DSP products for
+ # associated or dial in radars
+ # NOTE that DSP is level256 vs level16 for
+ # STP and this is where it is stored
+ # in AWIPS
+dsp_grid_dir : $(pproc_local_data)/dsp_decoded # decoded DSP radar grids
+dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files
+dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives
+
+
+hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC
+hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe
+
+hpe_satpre_dir : $(mpe_fieldgen_product_dir)/satpre
+hpe_input_dir : $(pproc_local_data)/app/hpe
+hpe_output_dir : $(pproc_local_data)/hpe
+hpe_sat_statevar_dir : $(rfcwide_output_dir)/state_var
+
+hpe_log_dir : $(pproc_local_data)/log/hpe
+
+hpe_hrap_grid_factor : 4 # 1 for HRAP grid
+ # 4 for quarter HRAP grid
+
+hpe_dhrmosaic_dir : $(hpe_output_dir)/dhrmosaic
+hpe_bdhrmosaic_dir : $(hpe_output_dir)/bdhrmosaic
+hpe_ermosaic_dir : $(hpe_output_dir)/ermosaic
+hpe_ebmosaic_dir : $(hpe_output_dir)/ebmosaic
+hpe_avg_ermosaic_dir : $(hpe_output_dir)/avgrmosaic
+hpe_max_ermosaic_dir : $(hpe_output_dir)/maxrmosaic
+hpe_lsatpre_dir : $(hpe_output_dir)/lsatpre
+
+hpe_dspheight_dir : $(hpe_output_dir)/height
+hpe_dspindex_dir : $(hpe_output_dir)/index
+hpe_height_dir : $(hpe_output_dir)/height
+hpe_index_dir : $(hpe_output_dir)/index
+
+hpe_dhrmosaic_grib_dir : $(hpe_dhrmosaic_dir)/grib
+dhrmosaic_netcdf_dir : $(hpe_dhrmosaic_dir)/netcdf
+dhrmosaic_gif_dir : $(hpe_dhrmosaic_dir)/gif
+hpe_bdhrmosaic_grib_dir : $(hpe_bdhrmosaic_dir)/grib
+bdhrmosaic_netcdf_dir : $(hpe_bdhrmosaic_dir)/netcdf
+bdhrmosaic_gif_dir : $(hpe_bdhrmosaic_dir)/gif
+hpe_ermosaic_grib_dir : $(hpe_ermosaic_dir)/grib
+ermosaic_netcdf_dir : $(hpe_ermosaic_dir)/netcdf
+ermosaic_gif_dir : $(hpe_ermosaic_dir)/gif
+hpe_ebmosaic_grib_dir : $(hpe_ebmosaic_dir)/grib
+ebmosaic_netcdf_dir : $(hpe_ebmosaic_dir)/netcdf
+ebmosaic_gif_dir : $(hpe_ebmosaic_dir)/gif
+
+dhrmosaic_save_grib : save
+dhrmosaic_save_gif : nosave
+dhrmosaic_save_netcdf : nosave
+bdhrmosaic_save_grib : save
+bdhrmosaic_save_gif : nosave
+bdhrmosaic_save_netcdf : nosave
+ermosaic_save_grib : save
+ermosaic_save_gif : nosave
+ermosaic_save_netcdf : nosave
+ebmosaic_save_grib : save
+ebmosaic_save_gif : nosave
+ebmosaic_save_netcdf : nosave
+
+hpe_gif_dir : $(hpe_output_dir)/hpe_gif
+hpe_jpeg_dir : $(hpe_output_dir)/hpe_jpeg
+hpe_netcdf_dir : $(hpe_output_dir)/hpe_netcdf
+hpe_grib_dir : $(hpe_output_dir)/hpe_grib
+hpe_xmrg_dir : $(hpe_output_dir)/hpe_xmrg
+hpe_save_gif : nosave
+hpe_save_jpeg : nosave
+hpe_save_netcdf : nosave
+hpe_save_grib : nosave
+
+dhr_window : 15
+dsp_window : 15
+dsp_duration : 60
+
+hpe_base_radar_mosaic : ERMOSAIC
+hpe_qpe_fieldtype : ERMOSAIC
+hpe_load_misbin : OFF
+hpe_debug_log : ON
+hpe_use_locbias : OFF
+hpe_runfreq : 5
+hpe_timelag : 5
+hpe_bias_source : RFC
+hpe_rfc_bias_lag : 2
+hpe_purge_logage : 720
+hpe_purge_fileage : 180
+hpe_purge_xmrgage : 75
+
+dhrmosaic_d2d_display_grib : ON
+ermosaic_d2d_display_grib : ON
+ebmosaic_d2d_display_grib : ON
+bdhrmosaic_d2d_display_grib : ON
+hpe_run_nowcast : ON
+hpe_nowcast_generate_list : PRTM, BPTRM
+hpe_nowcast_dir : $(hpe_output_dir)/nowcast
+hpe_rate_save_grib : save
+hpe_brate_save_grib : save
+hpe_tp1h_save_grib : save
+hpe_btp1h_save_grib : save
+hpe_4km_tp1h_save_grib : nosave
+hpe_4km_btp1h_save_grib : nosave
+nowcast_d2d_display_grib : ON
+hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
+hpn_use_meanvelocity : OFF
+hpn_meanvelocity_direction : 45 # direction precip is moving towards
+hpn_meanvelocity_speed : 20 # miles per hour
+
+
+hpe_send_grib : OFF # ON/OFF token to determine if grib file is
+ # to be sent to other sites such as NPVU
+
+#========END HPE tokens======================================================
+
+# ================= Flash Flood Guidance System =============================
+
+ffg_level : oper
+
+ffg_dir : $(apps_dir)/rfc/nwsrfs/ffg # Top-level ffg
+ffg_bin : $(ffg_dir)/bin # FFG execute dir
+ffg_files : $(ffg_dir)/files # FFG file group
+ffg_gsfiles : $(ffg_files)/$(ffg_level) # FFG files dir
+ffg_out_dir : $(ffg_dir)/output # FFG output dir
+ffg_grib_out : $(ffg_out_dir)/grib # GRIB output
+ffg_scripts : $(ffg_dir)/scripts # FFG scripts
+ffg_gff_level : grff # regular grid ffg dir
+ffg_gro_level : grro # regular grid ro dir
+ffg_usr_dir : $(ffg_gsfiles)/user # FFG user dir
+ffg_area_dir : $(ffg_gsfiles)/affg # FFG area dir
+ffg_cary_dir : $(ffg_gsfiles)/cary # FFG carryover dir
+ffg_define_dir : $(ffg_gsfiles)/define # FFG definition dir
+ffg_gridff_dir : $(ffg_gsfiles)/$(ffg_gff_level) # FFG grid ff dir
+ffg_gridro_dir : $(ffg_gsfiles)/$(ffg_gro_level) # FFG grid ro dir
+ffg_hwatr_dir : $(ffg_gsfiles)/hffg # FFG headwater dir
+
+ffg_gridpm_dir : $(ffg_gsfiles)/gdpm # grid runoff adjust parameters
+ffg_group_dir : $(ffg_gsfiles)/grpp # FFG groups of products
+ffg_prod_dir : $(ffg_gsfiles)/prod # FFG products dir
+ffg_text_dir : $(ffg_gsfiles)/text # FFG text dir
+ffg_wsup_dir : $(ffg_gsfiles)/wsup # Water supply dir
+
+# ffg program control
+ffg_error_output : on # whether to output error messages
+ffg_log_output : off # whether to output log messages
+
+# ===================== GRIB packer/encoder =================================
+
+grib_dir : $(apps_dir)/rfc/grib # Top level grib
+grib_rls : $(pproc_bin) # location of gribit executable
+grib_arc : $(grib_dir)/bin/ARCHIVE # grib archive
+grib_in_dir : $(rfcwide_xmrg_dir) # depends on data to be encoded
+grib_out_dir : $(grib_dir)/output # GRIB encoded files
+grib_error_output : on # turn on/off GRIB error output
+grib_set_subcenter_0 : off # set subcenter to 0
+ # on - set subcenter to 0
+ # off - do not set subcenter to 0
+
+# end of ffg apps
+
+#================== XSETS Apps_defaults Tokens - 08/03/2001 ===================
+
+# [] = default value
+#.................................
+# Date Control
+#.................................
+xsets_date_used : SYSTEM # computer system clock
+ # OFSFILES = forecast time series
+ # mm/dd/ccyy = explicit date, 12Z
+
+#.................................
+# Directories and files to use
+#.................................
+xsets_dir : $(apps_dir)/rfc/xsets
+xsets_level : oper
+xsets_files : $(xsets_dir)/files
+xsets_xsfiles : $(xsets_files)/$(xsets_level)
+xsets_param_dir : $(xsets_xsfiles)/param
+xsets_config_file : xsetsconfig
+xsets_output_dir : $(xsets_xsfiles)/output
+
+#.................................
+# Commands
+#.................................
+xsets_editor : "nedit"
+xsets_hydrographs_cmd : "$(xsets_dir)/bin/RELEASE/new_hydroplot"
+xsets_print_cmd : "lp"
+xsets_xmit_cmd : "cat "
+
+#.................................
+# Parameters for creation of hydrographs
+#.................................
+xsets_hydro_button : NO # Create Make Hydro button, [NO]
+ # (currently unused)
+xsets_make_hydro : NO # Create .gif hydrographs, [NO]
+
+#.................................
+# NEW_HYDROPLOTS parameters
+#.................................
+xsets_html_daily_dir : /pub/FcstGraphs # Location of gif images on
+ # web server
+xsets_html_flood_dir : /pub/FloodGraphs # Location of gif images on
+ # web server
+xsets_hydrographs_html : 1 # 1 = create basic html
+ # 0 = no html created
+xsets_hydrographs_output: "$(xsets_output_dir)/gifs"
+xsets_hydrographs_param : $(xsets_xsfiles)/hydrographs/param
+
+#.................................
+# File Print Options and Settings
+#.................................
+xsets_add_remarks : NO # Add remark after each site, [NO]
+xsets_brackets : NO # Put brackets around latest stage,
+ # forecasts and dates, [NO]
+xsets_cmt_line : NO # YES = separate line,
+ # NO = append to description, river
+xsets_expanded_dates : YES # Insert MMDD before values, [NO]
+xsets_fgroup_preamble : "FORECAST GROUP IS" #Preamble for the fgroup (string)
+xsets_H_precision : 1 # 0, [1], or 2 decimal precision of stages
+xsets_output_style : E # E = Expanded, each day has line,
+ # C = Compact
+xsets_print_crests : YES # Print crest comment, [NO]
+xsets_print_disclaimer : YES # Print disclaimer, [NO]
+xsets_print_fs : YES # YES = encode flood stage in SHEF,
+ # [NO] = display as comment
+xsets_print_fs_cross : COMMENT # Time level passes flood stage
+ # [NO] = don't include,
+ # SHEF = encode in SHEF,
+ # COMMENT = display as comment
+xsets_print_ls : COMMENT # Latest stage
+ # [NO] = don't include,
+ # SHEF = encode in SHEF,
+ # COMMENT = display as comment
+xsets_print_MAP : NO # Print MAP values, [NO]
+xsets_print_qpf : COMMENT # Print QPF values
+ # [NO] = don't include,
+ # SHEF = encode in SHEF,
+ # COMMENT = display as comment
+xsets_print_ws : YES # Display warning/caution stage, [NO]
+xsets_product_hdr : PIT # Identifier in Product Header, non-AWIPS
+xsets_Q_precision : 1 # 0, [1], 2 decimal precision of flows
+xsets_signature : $(LOGNAME) #User signature (string)
+xsets_wmo_id : TTAA00 KTUR DDHHMM # the wmo id
+xsets_ws_label : "WARNING" # Label for WARNING/[CAUTION] stage (string)
+xsets_zczc : YES # Include ZCZC & NNNN, [NO], non-AWIPS
+
+#.................................
+# Run Options
+#.................................
+xsets_age_check : 6 # Number of hours old of forecast before
+ # error generated, [6]
+xsets_edit_lock : NO # Lock main display when editing SETS file, [NO]???
+xsets_gen_summary : NO # Include summary of flood locations, [NO], Currently Unused
+xsets_msg_obs_warn : YES # Print warning when observed values are
+ # missing, [NO]
+xsets_numhrs_curob : 12 # number of hours back from current time to use
+ # informix obs as "current obs"
+xsets_num_MAP_values : 4 # Number [4] of MAP values to include in product
+xsets_num_qpf_values : 4 # Number [4] of qpf values to include in product
+xsets_numdays_hydro : 3 # Run Parameters for FCSTPROG
+xsets_ofs_select : OFS # OFS or IFP for time series files
+xsets_stdout : NO # Send wprint messages to stdout, [NO]
+xsets_time : Z # Time Zone code used in product
+ # ([Z], E, C, M, P, A, H OR N)
+# ================== end of xsets tokens =======================================
+
+#================== XNAV Apps_defaults Tokens - 03/29/2000 ====================
+# defaults for program XNAV
+
+xnav_user : oper
+
+#.................................
+# Date/time related tokens
+#.................................
+db_days : 10
+xnav_daily_days : 30
+xnav_ffg_periods : 3
+xnav_sixhr_periods : 40
+xnav_hyd_days_fut : 5
+xnav_hyd_days_prev : 5
+xnav_precip_hours : 240
+xnav_settoday :
+
+#.................................
+# Directories and files to use
+#.................................
+xnav_dir : $(apps_dir)/rfc/xnav
+xnav_data : $(xnav_dir)/data
+xnav_params : $(xnav_dir)/parameters
+xnav_P1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
+xnav_S1xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
+xnav_bin_dir : $(xnav_dir)/bin
+xnav_data_dir : $(xnav_data)
+xnav_ffg_dir : $(ffg_dir)/output/$(xnav_user)
+xnav_geo_data : $(geo_data)/$(ifp_rfc)/binary
+xnav_gif_dir : $(HOME)/gifs/xnav
+xnav_grid_ffg_dir : $(ffg_dir)/files/$(xnav_user)/grff
+xnav_localdata_dir : $(xnav_data)/localdata
+xnav_misc_dir : $(xnav_data)/misc_data
+xnav_qpfbin_dir : $(xnav_data)/wfoqpf
+xnav_rfcfmap_dir : $(xnav_data)/rfcqpf
+xnav_rules_dir : $(xnav_params)/rules
+xnav_shefdata_dir : $(xnav_data)/shefdata
+xnav_wfoqpf_dir : $(apps_dir)/rfc/data/products
+xnav_xmrg_dir : $(rfs_dir)/ofs/files/$(xnav_user)/griddb
+nmap_xmrg_dir : $(xnav_rfcfmap_dir)/nmap
+
+#.................................
+# Fonts and colors
+#.................................
+xnav_action_color : yellow
+xnav_flood_color : red
+xnav_ok_color : green
+xnav_ts1_color : yellow
+xnav_ts2_color : magenta
+xnav_label_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_legend_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_list_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_menu_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_pb_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_text_font : -*-charter-bold-*-*-*-17-*-*-*-*-*-*-*
+xnav_toggle_font : "-*-new century schoolbook-*-*-*-*-14-*-*-*-*-*-*-*"
+xnav_town_font : "-*-new century schoolbook-bold-*-*-*-14-*-*-*-*-*-*-*"
+
+idma_label_font : "-*-new century schoolbook-bold-*-*-*-12-*-*-*-*-*-*-*"
+idma_data_font : "-*-new century schoolbook-bold-*-*-*-18-*-*-*-*-*-*-*"
+
+#.................................
+# Window size controls
+#.................................
+xnav_hrap_x : 59
+xnav_hrap_xor : 311
+xnav_hrap_y : 83
+xnav_hrap_yor : 410
+xnav_hydro_height : 400
+xnav_hydro_width : 750
+xnav_scale : 8.0
+xnav_scale_colors : 3.0
+xnav_x_offset : 100
+xnav_y_offset : 100
+
+#.................................
+# Display options
+#.................................
+xnav_basins : yes
+xnav_counties : no
+xnav_cwas : no
+xnav_fgroups : no
+xnav_flights : no
+xnav_grid : no
+xnav_hydro_segments : no
+xnav_radars : no
+xnav_rfc : yes
+xnav_rivers : yes
+xnav_states : yes
+xnav_towns : yes
+
+#.................................
+# Other control options
+#.................................
+load_db_on_boot : no
+load_ofs_on_boot : no
+check_flood_on_boot : no
+use_new_xmrg : yes
+xnav_afosid : ? #PITRR1RHA
+xnav_editor : nedit
+xnav_exception_file : exception_file
+xnav_grid_ffg_pattern : xhr
+xnav_locrangecheck : no
+xnav_office_hdr : ? #KRHA
+xnav_only_use_ofs_data : no
+xnav_pe : "HG HP HT PP PT QR QT SD SF SW TA TD TS XC"
+xnav_precip_filter : .01
+xnav_route_code : ? #ES
+xnav_seg_type : 2
+xnav_send_shef : no
+xnav_show_p1_files : yes
+xnav_suppress_msg : yes
+xnav_xmit_cmd : "cat "
+
+# ====== MAKE24HRXMRG Tokens ======
+
+make24hrxmrg_settoday : # Run date in mm/dd/yyyy. If empty, the number-of-days-back
+ # argument to the program is used.
+make24hrxmrg_debug_level : 0 # Set debug output level. 1 or 2 yields more output.
+make24hrxmrg_endtime : # Hour to end the 24 hour total. Default: 12Z if not
+ # given.
+make24hrxmrg_tz : Z # Time zone; E, C, M, P, Y, H, L, or Z (default).
+
+# ================== end of xnav tokens ========================================
+
+#================== XDAT Apps_defaults Tokens - 03/29/2000 ====================
+# defaults for program XDAT
+
+xdat_user : oper
+
+#................................
+# Date/time related tokens
+#................................
+xdat_flood_hours : 6
+xdat_settoday :
+
+#..................................
+# Directories and files to use
+#..................................
+xdat_dir : $(apps_dir)/rfc/xdat
+xdat_data : $(xdat_dir)/data
+xdat_params : $(xdat_dir)/parameters
+xdat_groups_dir : $(xdat_params)/groups
+xdat_localdata_dir : $(xdat_data)/localdata
+xdat_shefdata_dir : $(xdat_data)/shefdata
+
+#..................................
+# Fonts and colors to use
+#..................................
+xdat_label_font : ncenb14
+xdat_list_font : helvb14
+xdat_text_font : user14x19
+xdat_pb_font : ncenb14
+
+#.................................
+# Window size controls
+#.................................
+xdat_scale : 1.0
+
+#..................................
+# Display Options
+#..................................
+xdat_clear_id : yes
+
+#..................................
+# Other Control Options
+#..................................
+xdat_afosid : ?ofstest?
+xdat_office_hdr : ???
+xdat_post_unk : $(shef_post_unk)
+xdat_route_code : ???
+xdat_send_shef : no
+xdat_xmit_cmd : "cat "
+# ================== end of xdat tokens ========================================
+
+#====================== Shape Data File Directory ==============================
+shape_data_dir : $(apps_dir)/ffmpShapeData # Directory holding shape
+ # files acting as data files
+
+
+#================== send_rfc Apps_defaults Tokens - 3/08/2001 =================
+send_rfc_dir : $(apps_dir)/rfc/send_rfc
+send_rfc_input_dir : $(send_rfc_dir)/data/send
+send_rfc_id : WWW
+send_hardcopy_nnn : PRI-WRK-EDI-SNO-ADM-RVF
+send_rfc_hardcopy : $(send_rfc_dir)/data/sbnprods
+send_rfc_hpc : 0
+send_rfc_host : ds-www
+send_rfc_alternate : 0
+# ================== end of send_rfc tokens ====================================
+
+#================== verify Apps_defaults Tokens - 08/03/2001 ==================
+# defaults for program verify
+vsys_output : $(vsys_dir)/output #location of output files
+vsys_input : $(vsys_dir)/input #location of input files
+vsys_files : $(vsys_dir)/files #location of verify files
+vsys_scripts : $(vsys_dir)/scripts #location of verify scripts
+vsys_output_log : test.log #name of log file
+vsys_ihfsdb : $(db_name) #ihfs_db name
+vsys_vdb : vdb1_1rfc #verification db name for RFC="rfc"
+verify_rls : $(vsys_dir)/bin/RELEASE #The release directory.
+vsys_rls : $(verify_rls) #Not really needed, but consistent.
+
+# ================== end of verify tokens ======================================
+
+# ================== RFC Archive Database tokens ===============================
+
+archive_shefdata_dir : /data/fxa/ispan/hydro_adbs # directory for archive data
+archive_enable : OFF # ON/OFF - Enable or Disable
+ # archive data feed (OFF by default)
+metar_output_dir : $(whfs_local_data_dir)/metar_output # metar2shef temp output directory
+ # used if archive_enable is ON
+
+#================== Directory tokens for RFC Archive Database ==================
+adb_dir : /rfc_arc # Base RFC Archive Directory
+adb_raw_que : /rfc_arc_data/q/raw/ # pathname for raw q input directory
+adb_pro_que : /rfc_arc_data/q/processed/ # pathname for processed q input directory
+adb_bin_dir : $(adb_dir)/bin # pathname for the bin directory
+adb_cfg_dir : $(adb_dir)/cfg # pathname for the config directory
+adb_lib_dir : $(adb_dir)/lib # pathname for the lib directory
+adb_logs_dir : $(adb_dir)/logs # pathname for the logs directory
+adb_scripts_dir: $(adb_dir)/scripts # pathname for the scripts directory
+
+#================== Shefdecode tokens for RFC Archive Database =================
+
+adb_shef_winpast : 9999 # number of days in past to post data for RAW
+adb_shef_winfuture : 9999 # number of minutes in future to post obs data
+ # for RAW.
+adb_shef_winpast_pro : 9999 # number of days in past to post data
+adb_shef_winfuture_pro : 9999 # number of minutes in future to post obs data
+shefdecode_rax_userid : oper # controlling UNIX user
+adb_shefdecode_input : $(adb_cfg_dir)/decoders # adb SHEF parameter file
+ # location
+adb_shef_raw_perflog : OFF # ON/OFF - create a separate performance
+ # log file to save internal decoder timing
+ # messages for monitoring performance
+adb_shef_raw_logs_dir : $(adb_logs_dir)/decoder/raw/logs # pathname for the
+ # daily logs directory
+adb_shef_raw_err_dir : $(adb_logs_dir)/decoder/raw/err # pathname for the
+ #product logs directory
+adb_shef_raw_keeperror : ALWAYS # keep files (=ALWAYS) or only
+ # when errors occur (=IF_ERROR)
+adb_shef_raw_post_unk : IDS_AND_DATA # NONE - do not post to the UnkStnValue tables
+ # values IDS_ONLY or IDS_AND_DATA
+ # will post everything
+ # to the UnkStnValue table
+adb_shef_pro_post_unk : NONE # NONE - do not post to the UnkStnValue tables
+ # values IDS_ONLY or IDS_AND_DATA
+ # will post everything
+ # to the UnkStnValue table
+adb_shef_pro_perflog : OFF # ON/OFF - create a separate performance
+ # log file to save internal decoder timing
+ # messages for monitoring performance
+adb_shef_pro_logs_dir : $(adb_logs_dir)/decoder/processed/logs # pathname for the
+ # daily logs directory
+adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for the
+ # product logs directory
+adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only
+ # when errors occur (=IF_ERROR)
+adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables
+adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables
+adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be
+ # posted for raw decoder.
+adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder.
+adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder.
+adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder.
+adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw
+ # decoder.
+adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table
+ # ON will write to both pecrsep and peirsep tables
+adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder.
+adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder.
+adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro
+ # decoder.
+adb_shef_pro_tmp_dir : $(adb_pro_que)
+adb_shef_raw_tmp_dir : $(adb_raw_que)
+adb_shef_raw_add_adjust : OFF
+
+#========== IHFS->RAX synchronization tokens for RFC Archive Database ==========
+adb_sync_logs_dir : $(adb_logs_dir)/dbsync # directory for synchronization log files
+adb_sync_mode : ANALYSIS # ANALYSIS or UPDATE
+adb_sync_tablenames : ALL # List of table names to synchronize
+adb_sync_ihfs_ingest: USE # USE or IGNORE
+adb_sync_rivercrit : ACTION # ACTION, FIS or BOTH
+
+
+#================== DatView program tokens for RFC Archive Database ============
+datview_db_name : $(adb_name)
+datview_startdate : '1975-01-01 00:00:00'
+datview_label_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
+datview_list_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
+datview_text_font : -schumacher-clean-bold-r-normal-*-14-*-75-75-c-80-*-*
+datview_text2_font :-adobe-courier-bold-r-normal-*-*-140-*-*-m-*-iso8859-1
+datview_bg_color : black
+datview_fg_color : white
+datview_ob_color1 : green
+datview_ob_color2 : blue
+datview_ob_color3 : yellow
+datview_ob_color4 : red
+datview_ob_color5 : DarkOrange
+datview_ob_color6 : SlateGray1
+datview_plot_font : -adobe-courier-bold-r-normal-*-*-80-*-*-m-*-iso8859-1
+datview_plot_width : 750
+datview_plot_height : 420
+datview_data_dir : /home/oper
+datview_raw_shef_dir : $(adb_raw_que)
+datview_pro_shef_dir : $(adb_pro_que)
+datview_office_header : KTUA # to be set by each RFC
+datview_pil : OKCRR1TUR # to be set by each RFC
+
+
+#=============== ARCH_NAV Apps_defaults Tokens - 05/5/2005 ==================
+# defaults for program ARCNAV
+
+anav_user : oper
+
+#.................................
+# Date/time related tokens
+#.................................
+anav_daily_days : 30
+anav_sixhr_periods : 40
+anav_precip_hours : 24
+
+
+#.................................
+# Directories and files to use
+#.................................
+
+anav_dir : /awips/hydroapps/lx/rfc/xnav
+anav_data : /data
+anav_flatfiles : $(anav_data)/flatfiles
+anav_params : $(anav_dir)/parameters
+anav_data_dir : $(anav_data)
+anav_geo_data : /awips/hydroapps/lx/geo_data/$(ifp_rfc)/binary
+anav_gif_dir : /rfc_arc/data/arcnav/gifs
+anav_localdata_dir : $(anav_data)/localdata
+anav_xmrg_dir : $(anav_flatfiles)
+
+#.................................
+# Fonts and colors
+#.................................
+anav_label_font : courb14gr
+anav_legend_font : courb14gr
+anav_list_font : courb14gr
+anav_menu_font : 9x15
+anav_pb_font : courb12gr
+anav_text_font : helvb18gr
+anav_toggle_font : courb14gr
+anav_town_font : courb12gr
+
+#.................................
+# Window size controls
+#.................................
+anav_hrap_x : 200
+anav_hrap_xor : 850
+anav_hrap_y : 200
+anav_hrap_yor : 470
+anav_hydro_height : 400
+anav_hydro_width : 750
+anav_scale : 3.5
+anav_scale_colors : 3.0
+anav_x_offset : 300
+anav_y_offset : 300
+
+#.................................
+# Display options
+#.................................
+anav_basins : yes
+anav_counties : no
+anav_cwas : no
+anav_fgroups : no
+anav_flights : no
+anav_grid : no
+anav_hydro_segments : no
+anav_radars : no
+anav_rfc : no
+anav_rivers : no
+anav_states : yes
+anav_towns : yes
+
+#.................................
+# Other control options
+#.................................
+anav_editor : nedit
+anav_suppress_msg : yes
+
+#......................................
+# tokens added for arcnav application
+# for future use
+#......................................
+anav_ok_color : green
+anav_action_color : yellow
+anav_flood_color : red
+anav_ts1_color : yellow
+anav_ts2_color : magenta
+
+# ================= end of arcnav tokens ======================================
+
+# ================== end of RFC Archive Database tokens ========================
+
+# ================== SSHP Directory Structure and application tokens ===============================
+
+local_data_sshp_dir : $(whfs_local_data_dir)/sshp_transfer
+sshp_control_dir : $(whfs_local_data_dir)/app/sshp
+sshp_ofs_extract_text_dir : $(local_data_sshp_dir)/ofs_extract_text
+sshp_ofs_extract_xml_dir : $(local_data_sshp_dir)/ofs_extract_xml
+sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml
+sshp_incoming_dir : $(local_data_sshp_dir)/incoming
+sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing
+sshp_log_dir : $(whfs_log_dir)/sshp
+sshp_java_process_host : px1f
+sshp_invoke_map_preprocess: ON
+sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY
+sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts
+sshp_initial_forecast_length: 24 # length of forecast in hours
+sshp_max_forecast_length: 120 # max length in hours of a forecast the user can generate in the GUI
+sshp_sac_update_expiration_hours: 25 # number of hours after which to update locally the SAC states
+sshp_sac_update_hours_forward: -2 # number of hours forward of last top of hour to save sac states -
+ # negative -2 means 2 hours BEFORE last top of hour
+sshp_adjustment_pairing_minutes : 70
+sshp_adjustment_interpolation_hours : 3
+sshp_show_simulated_timeseries : true
+
+sshp_data_dir : $(whfs_local_data_dir)/sshp # base sshp dynamic data dir
+sshp_precip_dir : $(sshp_data_dir)/precip # default location for saved precip files
+sshp_background_forecast_output_dir : $(sshp_data_dir)/forecast
+sshp_background_forecast_length : 48 # length of a background forecast
+
+sshp_hpn_minutes_before : 5 # don't use grid files prior to X minutes before Hour
+sshp_hpn_minutes_after : 5 # don't use grid files after X minutes past the Hour
+
+sshp_show_unadjusted_states: false # initial setting of option in GUI for displaying the unadjusted SAC-SMA states
+# ==================== Radar Climatology Tokens ==============================
+radclim_data_dir : $(pproc_local_data)/app/radclim
+
+# ==================== PDC Preprocessor Tokens ===============================
+pdc_clean_cache_minutes : 60
+pdc_temperature_hours : 168
+pdc_height_hours : 168
+pdc_snow_hours : 168
+pdc_wind_hours : 168
+pdc_weather_hours : 168
+pdc_precip_hours : 168
+pdc_lower_window : 5
+pdc_upper_window : 5
+
+pdc_pp_dir : $(whfs_local_data_dir)/pdc_pp
+pdc_pp_log_dir : $(whfs_log_dir)/pdc_pp
+
+# ====================== Historical Data Browser Tokens =======================
+
+hdb_help_dir : $(hdb_dir)/help_files # Historical data browser help
+ # files
+hdb_script_directory : $(hdb_dir)/scripts # Historical data browser
+ # scripts dir
+hdb_config_dir : $(hdb_dir)/app-defaults # Historical data browser
+ # configuration file directory
+
+hdb_height_in_pixels : 900 # Historical data browser map height in
+ # pixels
+hdb_width_in_pixels : 1200 # Historical data browser map width in
+ # pixels
+hdb_center_lat : 35 # The initial center latitude of the HDB
+hdb_center_lon : -88.9 # The initial center longitude of the HDB
+hdb_map_width : 2999.862 # The width in nautical miles of the area
+ # displayed in the HDB
+hdb_disclosure_limit : 60 # The disclosure limit for displaying finer
+ # detail in the city overlay.
+hdb_map_projection : FLAT # The initial map projection used by HDB.
+ # Possible values: FLAT, POLAR, HRAP
+# ====================== DHM Token =======================
+dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir
+dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir
+dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir
+rdhm_input_dir : $(geo_data)
+dhm_rain_plus_melt_data_dir: $(geo_data)
+# ================== end of SSHP Directory Structure tokens ========================
+
+# ========================== NRLDB Tokens===================
+nrldb_log : $(whfs_log_dir)/nrldb
+nrldb_data : $(whfs_local_data_dir)/nrldb
+nrldb_config : $(whfs_config_dir)/nrldb
+nrldb_tmp : /awips/hydroapps/whfs/local/data/output
+
+# The syntax needed in the file is:
+#
+# token : resource
+#
+# where: token is defined as a string delimited by white space or
+# the delimiter,
+# the delimiter between token and resource is the :,
+# no white space needs to surround the delimiter,
+# comments are indicated by a #,
+# neither token nor resource can begin with a # or :,
+# a # or a : can be embedded within resource,
+# resource can contain white space if it is bounded by
+# the ' or " characters,
+# blank lines are allowed.
+# referbacks are indicated by $(...). The '...' is resolved
+# the same way any other token is, and is substituted for
+# the $(...) string to compose the final resource value.
+# Multiple referbacks are allowed in a resource, but
+# embedded referbacks are not allowed (i.e. no
+# $($(...)) allowed).
+# Note that this file is read only if the token can not be resolved
+# as an environment variable.
+#
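+# As a small illustration of the rules above (hypothetical token names only,
+# not tokens defined elsewhere in this file):
+#
+#   demo_root  : /tmp/demo_data      # simple token : resource pair
+#   demo_logs  : $(demo_root)/logs   # referback; resolves to /tmp/demo_data/logs
+#   demo_title : "two words"         # white space allowed when quoted
+#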
+# ==============================================================================
diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java
index ea9725a6e2..ee6f61df3e 100644
--- a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java
+++ b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/GAFF.java
@@ -60,6 +60,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 5, 2011 mpduff Initial creation
+ * Sep 5, 2013 16437 wkwock Fix the "HiRes" issue
*
*
*
@@ -343,6 +344,9 @@ public class GAFF {
try {
uri = db.getDataURI(rfc, durString, today);
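+ // If no URI exists for the plain RFC identifier, retry with the "-HiRes" variant (DR 16437).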
+ if (uri == null) {
+ uri = db.getDataURI(rfc+"-HiRes", durString, today);
+ }
if (uri == null) {
continue;
}
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java
index 5d467b9b80..40aaded68b 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java
@@ -99,8 +99,8 @@ import com.vividsolutions.jts.geom.Polygon;
* 02/25/13 1660 D. Hladky FFTI design change to help mosaic processing.
* 05/01/2013 15684 zhao Unlock when Exception caught
* Jul 15, 2013 2184 dhladky Remove all HUC's for storage except ALL
+ * 09/03/2013 DR 13083 G. Zhang Added a fix in processRADAR(ArrayList).
*
- *
* @author dhladky
* @version 1
*/
@@ -1107,10 +1107,10 @@ public class FFMPProcessor {
for (int j = 0; j < dataVals.length; j++) {
- float fval = (float) ScanUtils.getDecodedDHRValue(dataVals[j]);
+ //float fval = (float) ScanUtils.getDecodedDHRValue(dataVals[j]);
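+ // DR 13083: pass the raw DHR bin value straight to getZRvalue2 instead of pre-decoding it here.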
try {
- val += ScanUtils.getZRvalue(fval,
+ val += ScanUtils.getZRvalue2(dataVals[j],//fval,// DR 13083
dhrMap.get(DHRValues.ZRMULTCOEFF),
dhrMap.get(DHRValues.MAXPRECIPRATEALLOW),
dhrMap.get(DHRValues.ZRPOWERCOEFF),
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml b/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml
index 5673569635..edcdea0661 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml
@@ -383,6 +383,12 @@
 			<period>200-00:15:00</period>
 		</rule>
+		<rule>
+			<keyValue>HPCGuide-2.5km</keyValue>
+			<versionsToKeep>2</versionsToKeep>
+			<period>00-00:15:00</period>
+		</rule>
 		<rule>
 			<keyValue>GFSGuide</keyValue>
diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/capture b/edexOsgi/com.raytheon.uf.tools.cli/impl/capture
index 90de8e49fe..9c6ef5af9b 100644
--- a/edexOsgi/com.raytheon.uf.tools.cli/impl/capture
+++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/capture
@@ -15,6 +15,9 @@ if [ "$REMOTE_SERVERS_TO_CHECK" == "" ]; then
REMOTE_SERVERS_TO_CHECK="dx1f dx2f dx3 dx4"
fi
+# the database host from which to grab currently running queries
+DATABASE_HOST="dx1f"
+
# Flags to control what data capture grabs, to enable flag must be YES, anything else will be considered off.
RUN_JSTACK="Y"
JSTACK_ITERATIONS="15"
@@ -25,7 +28,8 @@ MOVE_ALL_HS_ERR_PID="Y"
GRAB_REMOTE_TOP="Y"
GRAB_REMOTE_VMSTAT="Y"
GRAB_CAVE_AND_ALERTVIZ_LOGS="Y"
-GRAB_SCREENSHOT='Y'
+GRAB_SCREENSHOT="Y"
+GRAB_CURRENT_QUERIES="Y"
EDEX_MODE="N"
FORCE="N"
TGZ_OUTPUT="Y"
@@ -40,32 +44,44 @@ usage() {
echo "Script for capturing information about cave/edex and general server health."
echo
echo "Following options allowed"
- echo -e "-p {PID}\t\tdefault none"
- echo " Run capture for a specific PID, crash information will not be captured. Defaults to none and runs against all pids found."
- echo
- echo -e "-g {grep string}\tdefault [$grepString]"
- echo " The grep string used to find the processes"
- echo
- echo -e "-screen {y/n}\t\tdefault [$GRAB_SCREENSHOT]"
- echo " Screen print the current workstation (local user must be running capture)"
- echo
- echo -e "-s {y/n}\t\tdefault [$RUN_JSTACK]"
- echo " Run jstack to grab the thread stack information"
+ echo -e "-c \"{host names}\"\tdefault [$REMOTE_SERVERS_TO_CHECK]"
+ echo " The servers to grab top information from, make sure list is quoted and space delimited"
echo
echo -e "-d {y/n}\t\tdefault [$RUN_JMAP]"
echo " Run jmap to grab the head dump information"
echo
+ echo -e "-e {request/ingest/ingestGrib/ingestDat}"
+ echo " Run edex mode and grab information about the jvm passed. May be used multiple times to grab data about multiple jvms"
+ echo
echo -e "-f {y/n}\t\tdefault [$FORCE]"
echo " Force a jstack/jmap by default"
echo
- echo -e "-q {y/n}\t\tdefault [$RUN_QPID_STAT]"
- echo " Force a qpid-stat by default"
+ echo -e "-g {grep string}\tdefault [$grepString]"
+ echo " The grep string used to find the processes"
+ echo
+ echo -e "-l {y/n}\t\tdefault [$GRAB_CAVE_AND_ALERTVIZ_LOGS]"
+ echo " Captures the cave and alertviz logs. If run for a specific pid the only cave log captured will be for that pid"
echo
echo -e "-m {y/n}\t\tdefault [$MOVE_ALL_HS_ERR_PID]"
echo " Captures all hs_err_pid's found"
echo
- echo -e "-l {y/n}\t\tdefault [$GRAB_CAVE_AND_ALERTVIZ_LOGS]"
- echo " Captures the cave and alertviz logs. If run for a specific pid the only cave log captured will be for that pid"
+ echo -e "-p {PID}\t\tdefault none"
+ echo " Run capture for a specific PID, crash information will not be captured. Defaults to none and runs against all pids found."
+ echo
+ echo -e "-q {y/n}\t\tdefault [$RUN_QPID_STAT]"
+ echo " Run qpid-stat"
+ echo
+ echo -e "-Q {y/n}\t\tdefault [$GRAB_CURRENT_QUERIES]"
+ echo " Grab currently running database queries"
+ echo
+ echo -e "-r \"Reason for capture\""
+ echo " The reason for capture, so popup will not be shown"
+ echo
+ echo -e "-s {y/n}\t\tdefault [$RUN_JSTACK]"
+ echo " Run jstack to grab the thread stack information"
+ echo
+ echo -e "-screen {y/n}\t\tdefault [$GRAB_SCREENSHOT]"
+ echo " Screen print the current workstation (local user must be running capture)"
echo
echo -e "-t {y/n}\t\tdefault [$GRAB_REMOTE_TOP]"
echo " Captures top information from servers, auto login must be enabled"
@@ -73,21 +89,12 @@ usage() {
echo -e "-v {y/n}\t\tdefault [$GRAB_REMOTE_VMSTAT]"
echo " Captures vmstat information from servers, auto login must be enabled"
echo
- echo -e "-c \"{host names}\"\tdefault [$REMOTE_SERVERS_TO_CHECK]"
- echo " The servers to grab top information from, make sure list is quoted and space delimited"
- echo
- echo -e "-r \"Reason for capture\""
- echo " The reason for capture, so popup will not be shown"
+ echo -e "-V {y/n}\t\tdefault [$RUN_VERSIONS]"
+ echo " Grab version information"
echo
echo -e "-z {y/n}\t\tdefault [$TGZ_OUTPUT]"
echo " Tar and gzip the captured data"
echo
- echo -e "-e {request/ingest/ingestGrib/ingestDat}"
- echo " Run edex mode and grab information about the jvm passed. May be used multiple times to grab data about multiple jvms"
- echo
- echo -e "-v {y/n}\t\tdefault [$RUN_VERSIONS]"
- echo " Grab version information"
- echo
echo -e "-h"
echo " Display this usage statement"
exit 0
@@ -128,12 +135,18 @@ checkYes() {
# runs import to grab screen shot of users desktop
grabScreenShot() {
if [ "$GRAB_SCREENSHOT" == "y" ]; then
- echo "Capturing screen shot of desktop"
+ echo "Capturing screen shot of desktop"
t1=`date "+%Y%m%d %H:%M:%S"`
echo "${t1}: Capturing screen shot of desktop" >> $processFile
- import -window root -display :0.0 ${dataPath}/screenShot_0.png > ${dataPath}/screenShot_0.log 2>&1 &
- import -window root -display :0.1 ${dataPath}/screenShot_1.png > ${dataPath}/screenShot_1.log 2>&1 &
- import -window root -display :0.2 ${dataPath}/screenShot_2.png > ${dataPath}/screenShot_2.log 2>&1 &
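+ # Enumerate the X displays this user actually has open (parsed from `w -hs` output)
+ # rather than assuming displays :0.0 through :0.2 exist.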
+ possibleScreens=`w -hs $user | awk '{print $3}' | sort -u`
+ count=0
+ for pScreen in $possibleScreens;
+ do
+ if [[ $pScreen =~ :[0-9]+\.[0-9]+ ]]; then
+ import -window root -display $pScreen ${dataPath}/screenShot_${count}.png > ${dataPath}/screenShot_${count}.log 2>&1 &
+ let "count+=1"
+ fi
+ done
fi
}
@@ -165,6 +178,16 @@ grabRemoteVmstat() {
fi
}
+grabCurrentDatabaseQueries() {
+ if [ "$GRAB_CURRENT_QUERIES" == "y" ]; then
+ echo "Capturing current database queries"
+ t1=`date "+%Y%m%d %H:%M:%S"`
+ echo "${t1}: Capturing current database queries" >> $processFile
+ out_file="${dataPath}/database_queries.log"
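+ # Snapshot of non-idle sessions from pg_stat_activity, longest-running first.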
+ psql -d metadata -U awips -h ${DATABASE_HOST} -c "select datname, pid, client_addr, query, now()-xact_start as runningTime from pg_stat_activity where state != 'idle' order by runningTime desc;" >> $out_file 2>&1 &
+ fi
+}
+
checkForProcsAsOtherUsers() {
if [ ! -z "$procs" ]; then
numMyProcs=`echo "$myProcs" | wc -l`
@@ -374,21 +397,23 @@ while [ ! -z "$1" ]; do
shift 1
case $arg in
+ -a) ACCUM="$1"; shift 1;;
+ -d) RUN_JMAP="$1"; shift 1;;
+ -e) EDEX_MODE="Y"; edexProcs[$edexProcCount]="$1"; shift 1; let "edexProcCount+=1";;
+ -f) FORCE="$1"; shift 1;;
+ -g) grepString="$1"; shift 1;;
+ -l) GRAB_CAVE_AND_ALERTVIZ_LOGS="$1"; shift 1;;
+ -m) MOVE_ALL_HS_ERR_PID="$1"; shift 1;;
-p) cavePid="$1"; shift 1;;
-q) RUN_QPID_STAT="$1"; shift 1;;
- -g) grepString="$1"; shift 1;;
+ -Q) GRAB_CURRENT_QUERIES="$1"; shift 1;;
-r) REMOTE_SERVERS_TO_CHECK="$1"; shift 1;;
-s) RUN_JSTACK="$1"; shift 1;;
- -d) RUN_JMAP="$1"; shift 1;;
- -f) FORCE="$1"; shift 1;;
- -m) MOVE_ALL_HS_ERR_PID="$1"; shift 1;;
- -t) GRAB_REMOTE_TOP="$1"; shift 1;;
- -l) GRAB_CAVE_AND_ALERTVIZ_LOGS="$1"; shift 1;;
- -z) TGZ_OUTPUT="$1"; shift 1;;
- -e) EDEX_MODE="Y"; edexProcs[$edexProcCount]="$1"; shift 1; let "edexProcCount+=1";;
- -a) ACCUM="$1"; shift 1;;
- -v) GRAB_REMOTE_VMSTAT="$1"; shift 1;;
-screen) GRAB_SCREENSHOT="$1"; shift 1;;
+ -t) GRAB_REMOTE_TOP="$1"; shift 1;;
+ -v) GRAB_REMOTE_VMSTAT="$1"; shift 1;;
+ -V) RUN_VERSIONS="$1"; shift 1;;
+ -z) TGZ_OUTPUT="$1"; shift 1;;
-h|*) usage;;
esac
done
@@ -401,6 +426,7 @@ checkYes FORCE $FORCE
checkYes MOVE_ALL_HS_ERR_PID $MOVE_ALL_HS_ERR_PID
checkYes GRAB_REMOTE_TOP $GRAB_REMOTE_TOP
checkYes GRAB_REMOTE_VMSTAT $GRAB_REMOTE_VMSTAT
+checkYes GRAB_CURRENT_QUERIES $GRAB_CURRENT_QUERIES
checkYes GRAB_CAVE_AND_ALERTVIZ_LOGS $GRAB_CAVE_AND_ALERTVIZ_LOGS
checkYes EDEX_MODE $EDEX_MODE
checkYes TGZ_OUTPUT $TGZ_OUTPUT
@@ -519,29 +545,29 @@ if [ ! -z "${myProcs}" ]; then
IFS=$PREV_IFS
launchJstacks
-
launchJmaps
-
- runQpidStat
-
- grabRemoteTop
-
- grabRemoteVmstat
-
else
t1=`date "+%Y%m%d %H:%M:%S"`
echo "*** NO processes found for user $user, capturing limited data to $dataPath"
echo "${t1}: NO processes found for $user" >> $processFile
echo "" >> $processFile
-
- runQpidStat
- grabRemoteTop
- grabRemoteVmstat
fi
+# grab top for servers
+grabRemoteTop
+
+# grab vm stat for servers
+grabRemoteVmstat
+
+# grab current database queries
+grabCurrentDatabaseQueries
+
# grab screen shot, spawns background process for each screen
grabScreenShot
+# grab qpid stat
+runQpidStat
+
# ls users home directory to check nas performance
/usr/bin/time -p ls -la ~ > ${dataPath}/nas_check_ls_home.txt 2>&1 &
diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py b/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py
index e0bdc4e7e5..243e7ca477 100644
--- a/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py
+++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/src/gpd/gpd.py
@@ -37,6 +37,22 @@ USAGE_MESSAGE = \
gpd spx --f filePath
gpd spg --f filePath --p prodName [--v versionNum]
+
+ gpd qig --p prodName --f filePath
+
+ gpd qigl --p prodName
+
+ gpd qpg --p prodName --t referenceTime [--f filePath --v versionNum]
+
+ gpd qpgl --p prodName --t referenceTime [--v versionNum]
+
+ gpd qsg --p prodName --t referenceTime --id stationId [--f filePath --v versionNum]
+
+ gpd qsgl --p prodName --t referenceTime --id stationId [--v versionNum]
+
+ gpd qmg --p prodName --t referenceTime --slat latitude --slon longitude [--f filePath --v versionNum]
+
+ gpd qmgl --p prodName --t referenceTime --slat latitude --slon longitude [--v versionNum]
gpd pe
@@ -62,27 +78,6 @@ SUBCOMMAND_MESSAGE = \
qmxl: To query and print a moving product
spx: To store product XML file to EDEX server database
spg: To store product GEMPAk table file to EDEX server database
- pe: To purge expired products
- pa: To purge all products
- u: To print usage
-"""
-'''
- gpd qig --p prodName --f filePath
-
- gpd qigl --p prodName
-
- gpd qpg --p prodName --t referenceTime [--f filePath --v versionNum]
-
- gpd qpgl --p prodName --t referenceTime [--v versionNum]
-
- gpd qsg --p prodName --t referenceTime --id stationId [--f filePath --v versionNum]
-
- gpd qsgl --p prodName --t referenceTime --id stationId [--v versionNum]
-
- gpd qmg --p prodName --t referenceTime --slat latitude --slon longitude [--f filePath --v versionNum]
-
- gpd qmgl --p prodName --t referenceTime --slat latitude --slon longitude [--v versionNum]
-
qig: To query product information, result saved at optional filePath
qigl: To query and list product information
qpg: To query a product (all stations), result saved at optional filePath
@@ -91,7 +86,10 @@ SUBCOMMAND_MESSAGE = \
qsgl: To query and list a station product
qmg: To query a moving product, result saved at optional filePath
qmgl: To query and print a moving product
-'''
+ pe: To purge expired products
+ pa: To purge all products
+ u: To print usage
+"""
def __initLogger():
global logger
logger = logging.getLogger("gpd")
@@ -154,7 +152,7 @@ def __parseCommandLine():
#parser_info_printXml.add_argument("--f", dest="filePath", action="store",
# help=":target file path for return product")
parser_info_printXml.set_defaults(func=__getPrintProdInfoXml)
- '''
+
parser_infoGempak = subparsers.add_parser('qig')
parser_infoGempak.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
@@ -166,7 +164,7 @@ def __parseCommandLine():
parser_info_printGempak.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
parser_info_printGempak.set_defaults(func=__getPrintProdInfoGempak)
- '''
+
#To query a station product (single station)
parser_stnProdXml = subparsers.add_parser('qsx')
parser_stnProdXml.add_argument("--p", dest="prodName", action="store",required=True,
@@ -192,7 +190,7 @@ def __parseCommandLine():
help=":product version")
parser_stnProdXml_print.set_defaults(func=__getPrintStnProdXml)
- '''
+
parser_stnProdGempak = subparsers.add_parser('qsg')
parser_stnProdGempak.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
@@ -216,7 +214,7 @@ def __parseCommandLine():
parser_stnProdGempak_print.add_argument("--v", dest="versionNum", action="store",
help=":product version")
parser_stnProdGempak_print.set_defaults(func=__getPrintStnProdGempak)
- '''
+
#To query a moving product
parser_movingProdXml = subparsers.add_parser('qmx')
parser_movingProdXml.add_argument("--p", dest="prodName", action="store",required=True,
@@ -246,7 +244,7 @@ def __parseCommandLine():
help=":product version")
parser_movingProdXml_print.set_defaults(func=__getPrintMovingProdXml)
- '''
+
parser_movingProdGempak = subparsers.add_parser('qmg')
parser_movingProdGempak.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
@@ -274,7 +272,7 @@ def __parseCommandLine():
parser_movingProdGempak_print.add_argument("--v", dest="versionNum", action="store",
help=":product version")
parser_movingProdGempak_print.set_defaults(func=__getPrintMovingProdGempak)
- '''
+
#To query a product
parser_prodXml = subparsers.add_parser('qpx')
@@ -288,7 +286,7 @@ def __parseCommandLine():
help=":product version")
parser_prodXml.set_defaults(func=__getProdXml)
- '''
+
parser_prodGempak = subparsers.add_parser('qpg')
parser_prodGempak.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
@@ -299,7 +297,7 @@ def __parseCommandLine():
parser_prodGempak.add_argument("--v", dest="versionNum", action="store",
help=":product version")
parser_prodGempak.set_defaults(func=__getProdGempak)
- '''
+
parser_prodXml_print = subparsers.add_parser('qpxl')
parser_prodXml_print.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
@@ -309,7 +307,7 @@ def __parseCommandLine():
help=":product version")
parser_prodXml_print.set_defaults(func=__getPrintProdXml)
- '''
+
parser_prodGempak_print = subparsers.add_parser('qpgl')
parser_prodGempak_print.add_argument("--p", dest="prodName", action="store",required=True,
help=":name of a Generic Point Data product")
@@ -319,7 +317,7 @@ def __parseCommandLine():
help=":product version")
parser_prodGempak_print.set_defaults(func=__getPrintProdGempak)
- '''
+
'''
#parser_purge_prod = subparsers.add_parser('pp')
diff --git a/localization/localization.OAX/utility/common_static/site/OAX/warngen/config.xml b/localization/localization.OAX/utility/common_static/site/OAX/warngen/config.xml
deleted file mode 100644
index 608df36aa5..0000000000
--- a/localization/localization.OAX/utility/common_static/site/OAX/warngen/config.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-
-
-
- OMAHA/VALLEY NE
- OMAHA
- EAX/KANSAS CITY,DMX/DES MOINES,BOX/BOSTON,LBF/NORTH PLATTE,PQR/PORTLAND
- OMA
- severethunderstorm
- Flash Flood/ffw,Severe Thunderstorm/severethunderstorm,Tornado/tornado
- Severe Weather Statement/SVS,Flash Flood Statement/ffs,non-convective FFW (Dam Break)/dambreak,non-convective Flash Flood Statement/dambreakffs,Areal Flood Warning/flw,Areal Flood Warning Followup/fls,Areal Flood Advisory/fla,Areal Flood Advisory Followup/flas,Special Marine Warning/smw,Marine Weather Statement (SMW Follow)/smws,Marine Weather Statement standalone/marinestatement,Short Term Forecast/shortterm,Special Weather Statement (zones)/sws
- 5000
-
\ No newline at end of file
diff --git a/msi/VizLauncher/VizLauncher.suo b/msi/VizLauncher/VizLauncher.suo
index 610c195a0e..9b44de8eee 100644
Binary files a/msi/VizLauncher/VizLauncher.suo and b/msi/VizLauncher/VizLauncher.suo differ
diff --git a/msi/VizLauncher/VizLauncher/com/raytheon/viz/launcher/process/AbstractProcessLauncher.cs b/msi/VizLauncher/VizLauncher/com/raytheon/viz/launcher/process/AbstractProcessLauncher.cs
index db02974126..31a4bf9a90 100644
--- a/msi/VizLauncher/VizLauncher/com/raytheon/viz/launcher/process/AbstractProcessLauncher.cs
+++ b/msi/VizLauncher/VizLauncher/com/raytheon/viz/launcher/process/AbstractProcessLauncher.cs
@@ -56,11 +56,10 @@ namespace VizLauncher.com.raytheon.viz.launcher.process
{
ProcessStartInfo processStartInfo =
new ProcessStartInfo(this.constructProcessName(vizEnvironment.getLocation()));
- processStartInfo.EnvironmentVariables.Remove(EnvironmentProperties.ENVIRONMENT_VARIABLE_PATH);
- processStartInfo.EnvironmentVariables.Add(
- EnvironmentProperties.ENVIRONMENT_VARIABLE_PATH, vizEnvironment.getPath());
- processStartInfo.EnvironmentVariables.Add(
- EnvironmentProperties.ENVIRONMENT_VARIABLE_PYTHON_PATH, vizEnvironment.getPythonPath());
+ processStartInfo.EnvironmentVariables[EnvironmentProperties.ENVIRONMENT_VARIABLE_PATH] =
+ vizEnvironment.getPath();
+ processStartInfo.EnvironmentVariables[EnvironmentProperties.ENVIRONMENT_VARIABLE_PYTHON_PATH] =
+ vizEnvironment.getPythonPath();
processStartInfo.UseShellExecute = false;
processStartInfo.Arguments = this.getCommandLineArguments();
processStartInfo.RedirectStandardOutput = true;
diff --git a/msi/build/A2Staging/VisualStudio/VizLauncher.exe b/msi/build/A2Staging/VisualStudio/VizLauncher.exe
index 7b199ed61c..af03282d6f 100644
Binary files a/msi/build/A2Staging/VisualStudio/VizLauncher.exe and b/msi/build/A2Staging/VisualStudio/VizLauncher.exe differ
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh
new file mode 100644
index 0000000000..5ffcc8009c
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.ksh
@@ -0,0 +1,38 @@
+#!/usr/bin/ksh
+
+#setenv FXA_HOME /awips/fxa
+#setenv LOG_DIR /data/logs/fxa
+#source $FXA_HOME/readenv.csh
+
+RUN_FROM_DIR=`dirname $0`
+echo "RFD: $RUN_FROM_DIR"
+# set up SOME environment variables for WHFS applications
+. $RUN_FROM_DIR/../../set_hydro_env
+. $RUN_FROM_DIR/../../check_app_context
+
+#set NRLDB_DATA=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_data`
+#set NRLDB_LOG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`
+#set NRLDB_CONFIG=`/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_config`
+#set WHFS_BIN=`/awips/hydroapps/public/bin/get_apps_defaults.LX whfs_bin_dir`
+#cd /awips/hydroapps/whfs/local/data/backup_db/nrldb
+
+export NRLDB_DATA=$(get_apps_defaults nrldb_data)
+echo "NRLDB data: $NRLDB_DATA"
+
+export NRLDB_LOG=$(get_apps_defaults nrldb_log)
+echo "NRLDB log: $NRLDB_LOG"
+
+export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
+echo "NRLDB config: $NRLDB_CONFIG"
+
+export WHFS_BIN=$(get_apps_defaults whfs_bin_dir)
+echo "WHFS_BIN: $WHFS_BIN"
+
+export NRLDBLOGFILE=${NRLDB_LOG}/nrldb.log
+export NRLDBTMPFILE=${NRLDB_LOG}/nrldb.tmp
+tail -5000 $NRLDBLOGFILE > $NRLDBTMPFILE
+mv $NRLDBTMPFILE $NRLDBLOGFILE
+
+${WHFS_BIN}/nrldb.pl -t wfo -u
+
+#
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl
new file mode 100644
index 0000000000..409152e903
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/nrldb.pl
@@ -0,0 +1,1415 @@
+#!/usr/bin/perl
+
+use strict;
+use DBI;
+use AppConfig qw(:expand :argcount);
+
+
+#Set/define command line args
+my %cfg = ( DEBUG => 0); # debug mode on or off
+my $config = AppConfig->new(\%cfg); # create config object
+$config->define('type',{ARGCOUNT => ARGCOUNT_ONE, VALIDATE => '(WFO|RFC|HQ|wfo|rfc|hq)', ALIAS => 'T'});
+$config->define('local-control-file',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'L',DEFAULT => 0});
+$config->define('upload',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'U', DEFAULT => 0});
+$config->define('wfo-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'W', DEFAULT => 0});
+$config->define('rfc-id',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'R', DEFAULT => 0});
+$config->define('out-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'O', DEFAULT => 0});
+$config->define('input-xmlfile',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'I', DEFAULT => 0});
+$config->define('check',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'C', DEFAULT => 0});
+$config->define('verbose',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'V', DEFAULT => 0});
+$config->define('dbname',{ARGCOUNT => ARGCOUNT_ONE, ALIAS => 'D', DEFAULT => 0});
+$config->define('extract',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'E', DEFAULT => 0});
+$config->define('delete',{ARGCOUNT => ARGCOUNT_NONE, ALIAS => 'A', DEFAULT => 0});
+$config->getopt(\@ARGV);
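+
+# Illustrative invocations (option names come from the definitions above;
+# office identifiers and file paths are hypothetical):
+#   WFO, build the local extract and upload it:   nrldb.pl -t wfo -u
+#   RFC, keyed on an RFC identifier:              nrldb.pl -t rfc -R ABRFC -u
+#   HQ, load an office's XML file:                nrldb.pl -t hq -W OAX -I /path/to/OAX_nrldb.xml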
+
+our $type = uc($config->get('type'));
+our $localControlFile = $config->get('local-control-file');
+our $Upload = $config->get('upload');
+our $wfoID = uc($config->get('wfo-id'));
+our $rfcID = uc($config->get('rfc-id'));
+our $outFile = $config->get('out-xmlfile');
+our $inFile = $config->get('input-xmlfile');
+our $check = $config->get('check');
+our $verbose = $config->get('verbose');
+our $dbname_flag = $config->get('dbname');
+our $extract = $config->get('extract');
+our $delete = $config->get('delete');
+our $office;
+our $update_count = 0;
+our $insert_count = 0;
+our $error_count = 0;
+our $total_count = 0;
+our $file_name;
+our $conf_dir;
+my ($dbname, $host, $user, $pass, $nrldb_host, $backup_host);
+my @delete_list;
+my $delete_listRef;
+print "db name flag: $dbname_flag\n";
+if($check) {
+ warn "-----Starting NRLDB installation check-----\nInstallation Complete.\n";
+ print "Installation Complete.\n";
+ exit 0;
+}
+
+
+#Get config file info
+($dbname, $host, $user, $pass, $nrldb_host, $office, $backup_host) = read_config_file();
+
+if(!$dbname_flag)
+{
+ if( -e "/awips/hydroapps/public/bin/get_apps_defaults")
+ {
+ $dbname = `/awips/hydroapps/public/bin/get_apps_defaults.LX db_name`;
+ }
+}
+else{
+ $dbname = $dbname_flag;
+}
+# Do parameter checks
+if($type eq "")
+{
+ print "No office type specified.\nusage: --type WFO|RFC|HQ\n\n";
+ exit 1;
+}
+if($type eq "HQ")
+{
+ if($inFile eq 0)
+ {
+ print "No xml input file specified.\nusage: --type HQ --input-xmlfile 'file'\n\n";
+ exit 1;
+ }
+ if($rfcID eq 0 && $wfoID eq 0)
+ {
+ print "You must specify a WFO/RFC office identifier with the HQ type.\n";
+ exit 1;
+ }
+
+ unless($rfcID eq 0) {
+ $office = $rfcID;
+ }
+ unless($wfoID eq 0) {
+ $office = $wfoID;
+ }
+
+}
+
+if($type eq "RFC")
+{
+ if($rfcID eq 0)
+ {
+ print "You must specify an RFC office identifier with the rfc option.\nusage: --type RFC --rfc-id IDRFC\n\n";
+ exit 1;
+ }
+}
+
+
+#Connect to database
+our $db = db_connect($dbname, $host, $user, $pass);
+
+my $date = getdate();
+print "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n" if($verbose);
+warn "---Starting NRLDB process at $office\, running as $type\---\n---$date\n\n";
+print "Connected to database: $dbname\n" if($verbose);
+warn "Connected to database: $dbname\n";
+#Determine what type of office is running nrldb software
+if(($type eq "WFO") | ($type eq "RFC"))
+{
+ if($localControlFile eq 0)
+ {
+ download_control_file($type);
+ }
+ create_xml();
+ if($Upload)
+ {
+ upload_xml($nrldb_host);
+ upload_xml($backup_host);
+ }
+}
+elsif($type eq "HQ")
+{
+ if($delete)
+ {
+ $delete_listRef = get_delete_list();
+ @delete_list = @$delete_listRef;
+ foreach my $delete_table (@delete_list)
+ {
+ deleteValues($delete_table);
+ }
+ }
+ xml_parse();
+}
+
+print "\n-----------------------------\n\n" if($verbose);
+warn "\n-----------------------------\n\n";
+exit 0;
+
+
+# sub 'create_xml' is responsible for querying the database and putting the info into xml format.
+sub create_xml
+{
+
+my $table_name;
+my ($select_string, $field_string);
+my $xml_string;
+my $record_count;
+my ($st, $at);
+my $table_query;
+my $query_error_flag;
+my $numrows;
+my $lid_flag;
+my $pkey;
+my ($pk_name, $field_name);
+my $row;
+my $extract_detail;
+my %infohash;
+my @tables;
+my @fields;
+my @fields_all;
+my @select_array;
+my @PK;
+my @keys;
+my (@pk_output, @fields_output);
+
+#read control file and put specified fields into array
+my ($tables_ref, $fields_all_ref) = read_control_file();
+@tables = @$tables_ref;
+@fields_all = @$fields_all_ref;
+
+ $extract_detail = '';
+# print "EXTRACT: $extract\n";
+ unless($extract eq 0)
+ {
+ $extract_detail = extract_detail();
+ }
+
+# Start creating xml
+$xml_string = "\n\n";
+foreach $table_name (@tables)
+{
+
+ print "TABLE: $table_name\n" if($verbose);
+ warn "TABLE: $table_name\n";
+ $select_string = "";
+ $lid_flag = 1;
+ # Get primary key list for specified tables
+ @keys = $db->primary_key(undef, undef, $table_name);
+
+ foreach $pkey (@keys)
+ {
+ # The following 6 lines were added by Mark Armstrong (HSD) on 2/26/09
+ # to remove the quotes from primary keys.
+ # When primary keys occurred with quotes, the update queries
+ # were not successful.
+ if ($pkey =~ /"/){
+ my $length_pkey = length $pkey;
+ $length_pkey -= 2;
+ my $new_pkey = substr($pkey,1,$length_pkey);
+ $pkey=$new_pkey;
+ }
+ push(@PK, "$table_name.$pkey");
+ }
+
+ @pk_output = grep(/$table_name\.\w*/, @PK);
+ print "\tPK: @pk_output\n" if($verbose);
+ warn "\tPK: @pk_output\n";
+ @fields_output = grep(/$table_name\.\w*/, @fields_all);
+ print "\tFIELDS: @fields_output\n" if($verbose);
+ warn "\tFIELDS: @fields_output\n";
+
+ my $pk_count = @pk_output;
+ if($pk_count == 0)
+ {
+ print "No Primary Keys found for Table: $table_name\nContinuing\n\n" if($verbose);
+ warn "No Primary Keys found for Table: $table_name\nContinuing\n\n";
+ next;
+ }
+
+ #loop through arrays and put together a select string for specified table
+ foreach my $pk (@pk_output)
+ {
+ if($pk =~ /$table_name\.\w*/)
+ {
+ if($select_string eq "")
+ {
+ $select_string = "$pk";
+ }
+ else
+ {
+ $select_string .= ",$pk";
+ }
+ }
+ }
+
+
+ foreach my $fields (@fields_output)
+ {
+ if($select_string =~ /.*$fields.*/)
+ {
+ if($field_string eq "")
+ {
+ $field_string = "$fields";
+ }
+ else
+ {
+ $field_string .= ",$fields";
+ }
+ next;
+ }
+ elsif($fields =~ /.*ALL.*/)
+ {
+ $select_string = "*";
+ last;
+ }
+ else
+ {
+ if($field_string eq "")
+ {
+ $field_string = "$fields";
+ }
+ else
+ {
+ $field_string .= ",$fields";
+ }
+ $select_string .= ",$fields";
+ }
+ }
+
+
+ #print select string to be used
+ print "\n" if($verbose);
+ warn "\n";
+ $query_error_flag = 0;
+ #if select string equal 'ALL' get a list of all fields in specified table by querying database info tables.
+ if($select_string eq "*")
+ {
+
+ my $query_column1 = "SELECT c.oid
+ FROM pg_catalog.pg_class c
+ LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
+ WHERE pg_catalog.pg_table_is_visible(c.oid)
+ AND c.relname ~ '^$table_name\$'";
+
+ my $attribute_query = "SELECT a.attname
+ FROM pg_catalog.pg_attribute a
+ WHERE a.attnum > 0 AND NOT a.attisdropped
+ AND a.attrelid = ($query_column1)
+ ORDER BY a.attnum;";
+
+ eval
+ {
+ $at = $db->prepare($attribute_query);
+ $at->execute() or die "Cannot execute: ".$at->errstr();
+ };
+ if($@)
+ {print "$@\n" if($verbose); warn "$@\n";}
+
+ my $att_count = 0;
+ while ( defined ( my $attribues = $at->fetchrow_arrayref() ) )
+ {
+ if($att_count > 0)
+ {
+ $select_string .= ",$table_name.@$attribues[0]";
+ }
+ else
+ {
+ $select_string = "$table_name.@$attribues[0]";
+ }
+ $att_count++;
+ }
+ $field_string = $select_string;
+ }
+
+ #Check for lid in table
+ if($select_string !~ /$table_name\.lid/)
+ {
+ $lid_flag = lid_check($table_name);
+ }
+
+ # Determine query depending on office type and other parameters
+ ## Revised query to properly select only counties from primary HSA or identified WFO - Ernie Wells February 09 ##
+ if($type eq "WFO")
+ {
+ if($wfoID eq 0) {
+ if($table_name =~ /location/)
+ {
+ $table_query = "SELECT $select_string FROM location, admin WHERE location.hsa = admin.hsa $extract_detail ORDER BY lid;";
+ } elsif($table_name =~ /counties/) {
+ $table_query = "SELECT $select_string FROM counties, admin WHERE counties.wfo = admin.hsa;";
+ } elsif($table_name =~ /rpffcstgroup/) {
+ $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid join admin on l.hsa = admin.hsa;";
+ } elsif($table_name =~ /vtecevent/) {
+ $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location, admin where location.hsa = admin.hsa) $extract_detail;";
+ } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){
+ my $cutoff_dtime = getcutoffdate();
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
+ } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) {
+ my $cutoff_dtime = getcutoffdate();
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
+ } elsif($lid_flag == 1){
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location, admin WHERE location.lid = $table_name.lid AND location.hsa = admin.hsa) $extract_detail ORDER BY lid;";
+ }
+ else {
+ $table_query = "SELECT $select_string FROM $table_name\;";
+ }
+ }
+ else {
+ if($table_name =~ /location/)
+ {
+ if($extract eq 0) {
+ $table_query = "SELECT $select_string FROM location WHERE location.hsa = '$wfoID' $extract_detail ORDER BY lid;";
+ } else {
+ $table_query = "SELECT $select_string FROM location WHERE location.hsa like '%' $extract_detail ORDER BY lid;";
+ }
+ } elsif($table_name =~ /counties/) {
+ if($extract eq 0) {
+ $table_query = "SELECT $select_string FROM counties WHERE counties.wfo = '$wfoID';";
+ } else {
+ $table_query = "SELECT $select_string FROM counties WHERE counties.wfo in (select hsa from location where hsa is not null $extract_detail) ;";
+ }
+ } elsif($table_name =~ /rpffcstgroup/) {
+ if($extract eq 0) {
+ $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa = '$wfoID';";
+ } else {
+ my $rpgroup_extract_detail = $extract_detail;
+ $rpgroup_extract_detail =~ s/lid/l.lid/g;
+ $table_query = "SELECT distinct $select_string from rpffcstgroup join rpffcstpoint rp on rp.group_id = rpffcstgroup.group_id join location l on l.lid = rp.lid where l.hsa is not null $rpgroup_extract_detail;";
+ }
+ } elsif($table_name =~ /vtecevent/) {
+ if($extract eq 0) {
+ $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa = '$wfoID') ;";
+ } else {
+ my $vtec_extract_detail = $extract_detail;
+ $vtec_extract_detail =~ s/lid/geoid/g;
+ print "vtec_extract_detail: $vtec_extract_detail\n";
+ $table_query = "SELECT $select_string FROM vtecevent WHERE vtecevent.geoid in (select location.lid from location where location.hsa is not null) $vtec_extract_detail;";
+ }
+ } elsif($table_name eq "height" || $table_name =~ /temperature/ || $table_name =~ /curpp/ || $table_name =~ /curpc/ || $table_name eq "discharge"){
+ my $cutoff_dtime = getcutoffdate();
+ if($extract eq 0) {
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and obstime > '$cutoff_dtime' ORDER BY lid;";
+ } else {
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid ) and obstime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
+ }
+ } elsif($table_name =~ /fcstheight/ || $table_name =~ /fcstdischarge/) {
+ my $cutoff_dtime = getcutoffdate();
+ if($extract eq 0) {
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') and basistime > '$cutoff_dtime' ORDER BY lid;";
+ } else {
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) and basistime > '$cutoff_dtime' $extract_detail ORDER BY lid;";
+ }
+ } elsif($lid_flag == 1) {
+ if($extract eq 0) {
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid AND location.hsa = '$wfoID') $extract_detail ORDER BY lid;";
+ } else {
+ $table_query = "SELECT $select_string FROM $table_name WHERE exists (SELECT lid FROM location WHERE location.lid = $table_name.lid) $extract_detail ORDER BY lid;";
+ }
+ } else {
+ $table_query = "SELECT $select_string FROM $table_name\;";
+ }
+ }
+ } elsif($type eq "RFC") {
+ if($table_name =~ /location/) {
+ $table_query = "SELECT $select_string FROM location WHERE location.rfc='$rfcID' $extract_detail ORDER BY lid;";
+ } elsif($lid_flag == 1) {
+ $table_query = "SELECT $select_string from $table_name where exists (select lid from location where
+location.lid = $table_name.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;";
+ # $table_query = "SELECT $select_string from $table_name where exists (select lid from location where
+#location.lid=rating.lid and location.rfc='$rfcID') $extract_detail ORDER BY lid;";
+ } else {
+ $table_query = "SELECT $select_string FROM $table_name\;";
+ }
+ }
+
+ # print the query for log purpose and execute the query
+ print "$table_query\n\n" if($verbose);
+ warn "$table_query\n\n";
+ $record_count = 0;
+ eval
+ {
+ $st = $db->prepare($table_query);
+ $row = $db->selectall_arrayref($st,{Slice => {}});
+ #$st->execute() or die "Cannot execute: ".$st->errstr();
+ };
+ if ($@)
+ {
+ print "$@\n" if($verbose);
+ warn "$@\n";
+ $xml_string .= "
\n";
+ $query_error_flag = 1;
+ }
+
+ # if no db error continue adding info to xml file for the table.
+ if($query_error_flag == 0)
+ {
+ $numrows = $st->rows;
+ print "Number of records obtained: $numrows\n" if($verbose);
+ warn "Number of records obtained: $numrows\n";
+ if ($numrows == 0)
+ {
+ $xml_string .= "
/)
+ {
+ print "Delete Table: $1\n" if($verbose);
+ warn "Delete Table: $1\n";
+ $table = $1;
+ while($rawLine[$i] !~ m/<\/Table>/)
+ {
+ if($rawLine[$i] =~ //)
+ {
+ $record_num = $1;
+ while ($rawLine[$i] !~ m/<\/Record>/)
+ {
+ if($rawLine[$i] =~ //)
+ { $i++;
+ while($rawLine[$i] !~ m/<\/PK>/)
+ {
+ if($rawLine[$i] =~ m/<$table\.lid>(.*)<\/$table\.lid>/)
+ {
+ if(($last_lid != -1) && ($last_lid eq $1))
+ {$i++; next;}
+ #print "$1\n";
+ if ($total_count == 0)
+ {
+ $delete_str .= "WHERE $table.lid = '$1'";
+ }
+ else
+ {
+ $delete_str .= " OR $table.lid = '$1'";
+ }
+
+ $last_lid = $1;
+
+ }
+ $i++;
+ }
+ }
+ $i++;
+ }
+ $total_count++;
+ }
+ $i++;
+ }
+ print "\tTotal Delete LIDs: $total_count\n" if($verbose);
+ warn "\tTotal Delete LIDs: $total_count\n";
+ $last_f = 1;
+ }
+ elsif ($rawLine[$i] =~ /<\/NRLDB>/)
+ {$last_f = 1;}
+ else
+ {$i++;}
+ }
+ #print "$delete_str, $total_count\n";
+ return ($delete_str, $total_count);
+
+}
+
+
+sub insertValues($table, $record_num, $PK_name, $PK_value, $Field_name, $Field_value)
+{
+ my $num;
+ my ($fields, $values);
+ my ($update_set, $update_where);
+ my $Field_value_quoted;
+ my $table = shift;
+ my $record_num = shift;
+ my $PK_name = shift;
+ my $PK_value = shift;
+ my $Field_name = shift;
+ my $Field_value = shift;
+ my $update_flag = 0;
+ my $st_handle;
+ my $insertrows;
+
+ for($num = 0; $num <= $#$Field_value; $num++)
+ {
+ if($num == 0)
+ {
+ $fields = "($$Field_name[$num]";
+ if($$Field_value[$num] ne "NULL")
+ {
+ $$Field_value[$num] = $db->quote($$Field_value[$num]);
+ $values = "($$Field_value[$num]";
+ $update_set = "$$Field_name[$num]=$$Field_value[$num]";
+ }
+ else
+ {
+ $values = "($$Field_value[$num]";
+ $update_set = "$$Field_name[$num]=$$Field_value[$num]";
+ }
+ }
+ else
+ {
+ $fields .= ", $$Field_name[$num]";
+ if($$Field_value[$num] ne "NULL")
+ {
+ $$Field_value[$num] =~ s/\n//g;
+ $$Field_value[$num] =~ s/\r//g;
+ $$Field_value[$num] = $db->quote($$Field_value[$num]);
+ $values .= ", $$Field_value[$num]";
+ $update_set .= ", $$Field_name[$num]=$$Field_value[$num]";
+ }
+ else
+ {
+ $values .= ", $$Field_value[$num]";
+ $update_set .= ", $$Field_name[$num]=$$Field_value[$num]";
+ }
+ }
+ }
+ for($num = 0; $num <= $#$PK_name; $num++)
+ {
+ if($num == 0)
+ {
+ $$PK_value[$num] = $db->quote($$PK_value[$num]);
+ $update_where = "$$PK_name[$num]=$$PK_value[$num] ";
+ }
+ else
+ {
+ $$PK_value[$num] = $db->quote($$PK_value[$num]);
+ $update_where .= "AND $$PK_name[$num]=$$PK_value[$num]";
+ }
+ }
+
+ $fields .= ")";
+ $values .= ")";
+ my $insert_cmd = "INSERT INTO $table $fields VALUES $values\;";
+ my $update_cmd = "UPDATE $table SET $update_set WHERE $update_where\;";
+
+ eval {
+ $insert_count++;
+ $st_handle = $db->prepare($insert_cmd);
+ $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr();
+ $insertrows = $st_handle->rows();
+ if($insertrows == 0)
+ {
+ $insert_count--;
+ $error_count++;
+ print "ZERO ROWS FOR QUERY: $insert_cmd\n\n" if($verbose);
+ warn "ZERO ROWS FOR QUERY: $insert_cmd\n\n";
+ }
+ };
+
+ if ($@) {
+ if($@ =~ /duplicate key/)
+ {
+ $update_flag = 1;
+ $insert_count--;
+ }
+ else
+ {
+ print "$@\n" if($verbose);
+ warn "$@\n";
+ $insert_count--;
+ $error_count++;
+ print "INSERT ERROR ON QUERY: $insert_cmd\n\n" if($verbose);
+ warn "INSERT ERROR ON QUERY: $insert_cmd\n\n";
+
+ }
+ }
+
+ if($update_flag == 1)
+ {
+ eval {
+ $update_count++;
+ $st_handle = $db->prepare($update_cmd);
+ $st_handle->execute() or die "Cannot execute: ".$st_handle->errstr();
+ $insertrows = $st_handle->rows();
+ if($insertrows == 0)
+ {
+ $update_count--;
+ $error_count++;
+ print "ZERO ROWS FOR QUERY: $update_cmd\n\n" if($verbose);
+ warn "ZERO ROWS FOR QUERY: $update_cmd\n\n";
+ }
+ };
+
+ if ($@) {
+ print "$@\n" if($verbose);
+ warn "$@\n";
+ $update_count--;
+ $error_count++;
+ print "UPDATE ERROR ON QUERY: $update_cmd\n\n" if($verbose);
+ warn "UPDATE ERROR ON QUERY: $update_cmd\n\n";
+ }
+ }
+
+}
+
+
+sub db_connect
+{
+my $dbname = shift;
+my $host = shift;
+my $user = shift;
+my $pass = shift;
+
+my %db_attr = (
+ PrintError => 0,
+ RaiseError => 0,
+);
+
+my $dsn = "DBI:Pg:dbname=$dbname;host=$host";
+my $db = DBI->connect($dsn, $user, $pass, \%db_attr) or die "Can't connect() to database $dbname: $DBI::errstr";
+return ($db);
+}
+
+sub upload_xml
+{
+ print "---UPLOAD XML FILE----\n" if($verbose);
+ warn "---UPLOAD XML FILE----\n";
+ my $upload_string = "rsync -av --chmod=ugo+rw $outFile.zip $nrldb_host\::nrldb_xml/";
+ print "$upload_string\n" if($verbose);
+ warn "$upload_string\n";
+ my $upload_exe = `$upload_string`;
+ print "$upload_exe\n" if($verbose);
+ warn "$upload_exe\n";
+ print "Failed: \"$upload_string\"\n" if ($? && $verbose);
+ warn "Failed: \"$upload_string\"\n" if $?;
+ return;
+}
+sub download_control_file
+{
+ my $office_type = shift;
+ my $download_string;
+ print "---DOWNLOAD $office_type CONTROL FILE----\n" if($verbose);
+ warn "---DOWNLOAD $office_type CONTROL FILE----\n";
+
+ if ($office_type eq "WFO")
+ {
+ $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_wfo ${conf_dir}/";
+ }
+ elsif ($office_type eq "RFC")
+ {
+ $download_string = "rsync -av $nrldb_host\::nrldb_control/nrldb_control_rfc ${conf_dir}/";
+ }
+ print "$download_string\n" if($verbose);
+ warn "$download_string\n";
+ my $download_exe = `$download_string`;
+ print "$download_exe\n" if($verbose);
+ warn "$download_exe\n";
+ print "Failed: \"$download_string\"\n" if ($? && $verbose);
+ warn "Failed: \"$download_string\"\n" if $?;
+ return;
+}
+
+sub getdate()
+{
+my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = localtime(time) ;
+my $RealMonth = $Month + 1 ; # Months of the year are not zero-based
+my $FixedYear;
+
+if ($Hour < 10)
+{
+ $Hour = "0" . $Hour
+}
+
+if ($Minute < 10)
+{
+ $Minute = "0" . $Minute
+}
+
+if ($Second < 10)
+{
+ $Second = "0" . $Second
+}
+
+if ($RealMonth < 10)
+{
+ $RealMonth = "0" . $RealMonth;
+}
+
+if ($Day < 10)
+{
+ $Day = "0" . $Day;
+}
+
+if ($Year >= 100)
+{
+ $FixedYear = $Year - 100;
+}
+else
+{
+ $FixedYear = $Year;
+}
+
+if ($FixedYear < 10)
+{
+ $FixedYear = "0" . $FixedYear;
+}
+
+my $clean_date = "$Hour:$Minute:$Second $RealMonth/$Day/$FixedYear";
+
+return($clean_date);
+}
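+
+# For reference only: the manual zero-padding above is equivalent to a strftime
+# call from the core POSIX module.  Left as a comment so the shipped behavior
+# is untouched; this is a sketch, not part of the delivered script:
+#
+#   use POSIX qw(strftime);
+#   my $clean_date = strftime("%H:%M:%S %m/%d/%y", localtime(time));
+#
+# and getcutoffdate() below corresponds to
+#   strftime("%Y-%m-%d %H:%M", gmtime(time - 172800));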
+
+sub lid_check {
+ my $table_name = shift;
+ my $at;
+ my $lid_flag = 0;
+
+ my $query_column1 = "SELECT c.oid
+ FROM pg_catalog.pg_class c
+ LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
+ WHERE pg_catalog.pg_table_is_visible(c.oid)
+ AND c.relname ~ '^$table_name\$'";
+
+ my $attribute_query = "SELECT a.attname
+ FROM pg_catalog.pg_attribute a
+ WHERE a.attnum > 0 AND NOT a.attisdropped
+ AND a.attrelid = ($query_column1)
+ ORDER BY a.attnum;";
+
+ eval {
+ $at = $db->prepare($attribute_query);
+ $at->execute() or die "Cannot execute: ".$at->errstr();
+ };
+ if($@) {
+ print "$@\n";
+ }
+
+ while ( defined ( my $attribues = $at->fetchrow_arrayref() ) ) {
+ if(@$attribues[0] =~ /^lid$/) {
+ $lid_flag = 1;
+ }
+ }
+
+return ($lid_flag);
+}
+
+BEGIN {
+ use CGI::Carp qw(carpout);
+ my $logDir;
+ if( -e "/awips/hydroapps/public/bin/get_apps_defaults"){
+ $logDir = `/awips/hydroapps/public/bin/get_apps_defaults.LX nrldb_log`;
+ chomp($logDir);
+ } else {
+ print "Could not access /awips/hydroapps/public/bin/get_apps_defaults.LX. Exiting\n";
+ exit -1;
+ }
+ print "log dirlogDir\n";
+ my $log = "${logDir}/nrldb.log";
+ open(LOG, ">>$log") or die "Unable to open $log. $! ";
+ carpout(*LOG);
+}
+
+END {
+ my $date = `date`;
+ print LOG "End $0 at $date\tElapsed time: " . (time - $^T) . " seconds\n\n";
+ close LOG;
+}
+
+sub getcutoffdate()
+{
+my ($Second, $Minute, $Hour, $Day, $Month, $Year, $WeekDay, $DayOfYear, $IsDST) = gmtime(time-172800) ;
+my $RealMonth = $Month + 1 ; # Months of the year are not zero-based
+my $FixedYear;
+
+if ($Hour < 10)
+{
+ $Hour = "0" . $Hour
+}
+
+if ($Minute < 10)
+{
+ $Minute = "0" . $Minute
+}
+
+if ($Second < 10)
+{
+ $Second = "0" . $Second
+}
+
+if ($RealMonth < 10)
+{
+ $RealMonth = "0" . $RealMonth;
+}
+
+if ($Day < 10)
+{
+ $Day = "0" . $Day;
+}
+
+ $FixedYear = $Year + 1900;
+
+my $clean_date = "$FixedYear-$RealMonth-$Day $Hour:$Minute";
+
+return($clean_date);
+}
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh
new file mode 100644
index 0000000000..4710156c93
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/send_nrldb_update.sh
@@ -0,0 +1,173 @@
+#!/bin/sh
+###############################################################################
+# This script is run at the field office to send ad-hoc updates to the NRLDB
+# server, then on to the AHPS CMS. It can be run at any time. It is designed
+# to send small, time-sensitive updates to the CMS. It takes two argument
+# lists: -table table names (comma-separated) and -lid lid names
+# (comma-separated). It parses the arguments, selects the updated data from
+# the database and builds an SQL-formatted text file for use on the nrldb and
+# CMS databases. The SQL file contains a delete statement that deletes the
+# pre-existing data for the lid/table combinations before running the inserts.
+#
+# Usage: send_nrldb_update.sh -table table1,table2,... -lid lid1,lid2,...
+# Example: send_nrldb_update.sh -table rating,floodstmt -lid BRKM2,CBEM2
+#
+if [ $# -ne 4 ]
+then
+ echo "Incorrect number of arguments entered: $#"
+ echo "Correct Arguments are:"
+ echo "send_nrldb_update.sh -table table1,table2 -lid lid1,lid2"
+ echo "Any number of tables and lids may be specified, but they need to be in a comma separated list with no spaces between commas and table/lid names"
+ exit 0
+fi
+# set up SOME environment variables for NRLDB applications
+export apps_dir=/awips2/edex/data/share/hydroapps
+export EDEX_HOME=/awips2/edex
+export NRLDB_DATA=`get_apps_defaults nrldb_data`
+export NRLDB_LOG=$(get_apps_defaults nrldb_log)
+export NRLDB_CONFIG=$(get_apps_defaults nrldb_config)
+export db_name=$(get_apps_defaults db_name)
+export NRLDB_TMP=$(get_apps_defaults nrldb_tmp)
+export PGUSER=awips
+
+# get the nrldb host and wfo from the nrldb.conf file/database
+nrldb_host=`grep nrldb_host $NRLDB_CONFIG/nrldb.conf | cut -d= -f2 | sed 's/"//g' | sed 's/ //g'`
+wfo=`psql -d $db_name -c "select hsa from admin;" | tail -3 | head -1 | sed -e 's/ //g'`
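+# The tail/head/sed pipeline above strips psql's table formatting so that only
+# the single hsa value remains (e.g. "OAX" at a hypothetical Omaha office).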
+echo `date`
+
+# create the final SQL file that will be sent to the NRLDB host
+timestamp=`date +%Y%m%d%H%N`
+sql_file="${wfo}_update_${timestamp}.sql"
+if [ -f $sql_file ]
+then
+ rm $sql_file
+fi
+
+# build the list of tables/lids to send
+lid_list="XXXXX"
+table_list="XXXXX"
+while [ $# -gt 0 ]
+do
+ case "$1" in
+ -lid) lid_list="$2,";shift;;
+ -table) table_list="$2,";shift;;
+ *) break;;
+ esac
+ shift
+done
+
+# set the last update information for update_nrldb.pl to use
+echo `date` > ${NRLDB_LOG}/last_nrldb_update.txt
+up_lid_list=`echo $lid_list | sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/'`
+echo "lid list: $up_lid_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
+echo "table_list: $table_list" >> ${NRLDB_LOG}/last_nrldb_update.txt
+
+#loop through the tables/lids
+if [ $table_list != "XXXXX" ]
+then
+ pos=1
+ table="XXXXX"
+ ltable=`echo $table | wc -m`
+ while [ $ltable -gt 4 ]
+ do
+ table=`echo $table_list | cut -d"," -f$pos`
+ pos=`expr $pos + 1`
+ ltable=`echo $table | wc -m`
+ if [ $ltable -gt 4 ]
+ then
+ lid="XXXXX"
+ lpos=1
+ llid=`echo $lid | wc -m`
+ while [ $llid -gt 3 ]
+ do
+ lid=`echo $up_lid_list | cut -d"," -f$lpos`
+ lpos=`expr $lpos + 1`
+ llid=`echo $lid | wc -m`
+ if [ $llid -gt 3 ]
+ then
+ # fetch the values from the DB and edit them
+ export PGUSER=awips
+ touch $NRLDB_TMP/update.txt
+ chmod ugo+rw $NRLDB_TMP/update.txt
+ ls -l $NRLDB_TMP/update.txt
+ psql -d $db_name -c "copy (select * from $table where lid = '$lid') to '$NRLDB_TMP/update.txt' with delimiter '|';"
+ cp $NRLDB_TMP/update.txt ${NRLDB_DATA}/update.txt
+ sed -f ${NRLDB_CONFIG}/sed_script.txt ${NRLDB_TMP}/update.txt > ${NRLDB_DATA}/update11.txt
+ sed -e "s/|/'|'/g" ${NRLDB_DATA}/update11.txt > ${NRLDB_DATA}/update1.txt
+ sed -e "s/^/insert into $table values('/g" ${NRLDB_DATA}/update1.txt > ${NRLDB_DATA}/update2.txt
+ sed -e "s/$/');/g" ${NRLDB_DATA}/update2.txt > ${NRLDB_DATA}/update3.txt
+ sed -e "s/|/,/g" ${NRLDB_DATA}/update3.txt > ${NRLDB_DATA}/update4.txt
+ if [ -f "${NRLDB_DATA}/update.txt" ]
+ then
+ update_lines=`wc -l "${NRLDB_DATA}/update.txt" | cut -d" " -f1`
+ else
+ echo "No update file found".
+ update_lines=0
+ fi
+ if [ $update_lines -gt 0 ]
+ then
+ if [ $table != "location" -a $table != "riverstat" ]
+ then
+ echo "delete from $table where lid = '$lid';" >> ${NRLDB_DATA}/$sql_file
+ fi
+ cat ${NRLDB_DATA}/update4.txt >> ${NRLDB_DATA}/$sql_file
+ fi
+ # location and riverstat require special handling since they have dependent tables via foreign keys
+ if [ $table = "location" ]
+ then
+ sql_stmt="update location set lid = '$lid'"
+ for col in county coe cpm detail elev hdatum hsa hu lat lon lremark lrevise name network rb rfc sbd sn state waro wfo wsfo type des det post stntype tzone
+ do
+ psql -d $db_name -c "select $col from location where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
+ ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
+ if [ $ct_zero -eq 0 ]
+ then
+ export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
+ new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
+ sql_stmt="$sql_stmt, $col = '$new_val'"
+ fi
+ done
+ sql_stmt="$sql_stmt where lid = '$lid';"
+ echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
+
+ elif [ $table = "riverstat" ]
+ then
+ sql_stmt="update riverstat set lid = '$lid'"
+ for col in primary_pe bf cb da response_time threshold_runoff fq fs gsno level mile pool por rated lat lon remark rrevise rsource stream tide backwater vdatum action_flow wstg zd ratedat usgs_ratenum uhgdur use_latest_fcst
+ do
+ psql -d $db_name -c "select $col from riverstat where lid = '$lid' and $col is not null;" > ${NRLDB_DATA}/update.txt
+ ct_zero=`grep -c "0 row" ${NRLDB_DATA}/update.txt`
+ if [ $ct_zero -eq 0 ]
+ then
+ export val=`cat ${NRLDB_DATA}/update.txt | head -3 | tail -1 | cut -c2-80`
+ new_val=`echo "$val" | sed -f ${NRLDB_CONFIG}/sed_script.txt`
+ sql_stmt="$sql_stmt, $col = '$new_val'"
+ fi
+ done
+ sql_stmt="$sql_stmt where lid = '$lid';"
+ echo $sql_stmt >> ${NRLDB_DATA}/$sql_file
+ fi
+ fi
+ done
+ fi
+
+ done
+
+ # send the SQL file to the NRLDB server
+ if [ -f ${NRLDB_DATA}/$sql_file ]
+ then
+ rsync -av ${NRLDB_DATA}/$sql_file ${nrldb_host}\::nrldb_update/
+ echo "SQL file: $sql_file created for lids: $up_lid_list and tables: $table_list"
+ else
+ echo "No SQL file created. Database contained no entries for lids: $up_lid_list and tables: $table_list"
+ fi
+fi
+
+# remove the temp files to keep the directory clean
+for temp_file in ${NRLDB_DATA}/update.txt ${NRLDB_DATA}/update11.txt ${NRLDB_DATA}/update1.txt ${NRLDB_DATA}/update2.txt ${NRLDB_DATA}/update3.txt ${NRLDB_DATA}/update4.txt
+do
+ if [ -f $temp_file ]
+ then
+ rm $temp_file
+ fi
+done
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl
new file mode 100644
index 0000000000..0a0a08728c
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/bin/update_nrldb.pl
@@ -0,0 +1,274 @@
+#!/usr/bin/perl
+################################################################################
+# update_nrldb.pl is the GUI for the Ad-Hoc update process.                   #
+# This process was put in place so that WFOs could update information         #
+# between daily runs of the NRLDB update process. The information is #
+# collected at the WFO, sent to the NRLDB central server and then forwarded to #
+# CMS servers outside of the AWIPS firewall. #
+# #
+# Developer: Mark Armstrong (OCWWS/HSD) #
+# Developed 2011 - Modified for AWIPS2 2013 #
+################################################################################
+
+use Tk;
+use strict;
+use warnings;
+use AppConfig qw(:expand :argcount);
+use DBI;
+
+$ENV{EDEX_HOME}="/awips2/edex";
+$ENV{apps_dir}="/awips2/edex/data/share/hydroapps";
+our $BIN_DIR = `get_apps_defaults.LX whfs_bin_dir`;
+chomp($BIN_DIR);
+our $LOG_DIR = `get_apps_defaults.LX nrldb_log`;
+chomp($LOG_DIR);
+my $lids;
+my $tables;
+
+# Set up some initial configuration. Most of this comes from the HydroGen input file: hg.cfg
+$ENV{HYDROGENHOME} = "/awips/hydroapps/HydroGen" if ! defined $ENV{HYDROGENHOME};
+my %cfg = ( DEBUG => 0, # debug mode on or off
+ PEDANTIC => 0, # be patient with warnings/errors
+ CREATE => 1, # create variables, defining not required...
+ GLOBAL => { # for all config options unless overridden...
+ EXPAND => EXPAND_ALL, # expand ~, $ENV{*}, and $(var)
+ ARGCOUNT => ARGCOUNT_ONE, # each config expects an arg unless overriden...
+ ARGS => '=s' # each arg is a string unless overriden
+ }
+ );
+
+my $config = AppConfig->new(\%cfg); # create config object
+
+$config->define('version',{ ALIAS => 'V',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
+$config->define('help',{ ALIAS => 'h',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
+$config->define('man',{ ALIAS => 'm',ARGCOUNT => ARGCOUNT_NONE, ARGS => '!',DEFAULT => 0});
+$config->define('DBengine',{ VALIDATE => '[\w]+',DEFAULT => "Pg"});
+$config->define('DBname',{ VALIDATE => '[\w]+',DEFAULT => "hd_ob8xxx"});
+$config->define('DBhost',{ VALIDATE => '[-\w]+',DEFAULT => "dx1f"});
+$config->define('DBport',{ ARGS => '=i',DEFAULT => 5432});
+$config->define('master',{ VALIDATE => '[.\w]+',DEFAULT => "HGstation"});
+$config->define('basedir',{ VALIDATE => '[- /.\w]+',DEFAULT => $ENV{HYDROGENHOME} . "/bin"});
+
+$config->file($ENV{HYDROGENHOME} . "/input/hg.cfg"); # look in user's $HYDROGENHOME to find configured settings
+$config->args(\@ARGV); # get config settings from the command-line, overwriting any settings from the file...
+
+my $master = $config->get('master'); # name of DB table or view which holds master list of IDs for which MXD files are to be generated...
+my $DBengine = $config->get('DBengine');
+my $DBname = $config->get('DBname');
+my $DBhost = $config->get('DBhost');
+my $DBport = $config->get('DBport');
+my $baseDir = `pwd`;
+chomp $baseDir;
+my $DBstr;
+my $wildcard;
+
+#Open a database connection and get the list of LIDs from the IHFS DB
+if($DBengine eq "Pg") {
+ $DBstr = "dbi:$DBengine:dbname=$DBname;host=$DBhost;port=$DBport";
+ $wildcard = '%';
+} else {
+ $DBstr = "dbi:$DBengine:$DBname";
+ $wildcard = '*';
+}
+
+my $dbh = DBI->connect("$DBstr",undef,undef,{ChopBlanks => 1}) or warn $DBI::errstr;
+# creates the list of WFOs based on the HydroGen .xxx_backup files
+# and builds the query to create the list of LIDs
+my $wfo=`ls -a /awips/hydroapps/HydroGen/ | grep _backup | cut -c2-4`;
+my $list_len=length $wfo;
+my $num_wfos=$list_len/4;
+my $index=1;
+my $off=0;
+my $wfoid=substr($wfo,$off,3);
+my $wfoID=uc $wfoid;
+my $wfo_query = "(location.hsa = \'$wfoID\'";
+while ($index < $num_wfos){
+ $off+=4;
+ $wfoid=substr($wfo,$off,3);
+ $wfoID=uc $wfoid;
+ $wfo_query .= " or location.hsa = \'$wfoID\'";
+ $index++;
+}
+$wfo_query .= ")";
+
+#my $list_type="river";
+our $mw = MainWindow->new;
+$mw->title('Ad-Hoc NRLDB Update');
+
+my $lst_lab= $mw->Label(-text => 'Add any Unlisted Locations (comma-separated): ');
+my $sql = "select distinct hgstation.lid,location.name,location.hsa from hgstation,location where hgstation.lid = location.lid and $wfo_query order by 3,1;";
+
+# get the list of LIDs
+my $qhw = $dbh->prepare("$sql") or warn $DBI::errstr;
+
+our @lid_list; # = ($wildcard);
+
+#get the data from the DB
+get_results($qhw,\@lid_list);
+#print "ct: " . @lid_list;
+
+#set up a static array with the tables that are allowed for ad-hoc updates
+#table_list is the actual name of the DB tables, while tabledesc is a friendlier description that is displayed to the user
+our @table_list = ('location','riverstat','crest','floodstmt','hgstation','floodcat','lowwater');
+my @tabledesc = ('Location','Riverstat','Crest History','Impacts','HGstation','Flood Categories','Low Water');
+
+$dbh->disconnect();
+
+#manipulate the results of the lid/hsa/name query for better display
+my @liddeschsa;
+our @lidsend;
+$index=0;
+my $num_lids=scalar(@lid_list);
+while ($index < $num_lids){
+ my $line = $lid_list[$index];
+# print "line: $line\n";
+ my @results = split('\|',$line);
+ #my $lid = $lid_list[$index];
+ my $lid_lid = $results[0];
+ my $lid_name = $results[1];
+ my $lid_hsa = $results[2];
+# print "lid: $lid_lid name: $lid_name hsa: $lid_hsa\n";
+ push(@liddeschsa,"$lid_hsa | $lid_lid | $lid_name");
+ push(@lidsend,$lid_lid);
+ $index++;
+}
+
+# Create the GUI object
+#my $mw = MainWindow->new;
+#$mw->title('Ad-Hoc NRLDB Update');
+
+#my $lst_lab= $mw->Label(-text => 'Locations List: ');
+#my $lst_rad_riv = $mw-> Radiobutton(-text=>'AHPS River Points',
+# -value=>'river', -variable=>\$list_type);
+#my $lst_rad_precip = $mw-> Radiobutton(-text=>'Precip Points',
+# -value=>'precip', -variable=>\$list_type);
+# Labels for the LID and table scroll boxes
+my $misc_ent = $mw->Entry();
+my $label1 = $mw->Label(-text => 'HSA|LID|Location Name');
+my $label2 = $mw->Label(-text => 'Tables');
+
+# Create the scroll boxes for the LIDs and tables
+my $lb1 = $mw->Scrolled('Listbox',
+ -scrollbars => 'osoe',-width=>50,
+ -selectmode => 'multiple', -exportselection=>0);
+my $lb2 = $mw->Scrolled('Listbox',
+ -scrollbars => 'osow',-width=>20,
+ -selectmode => 'multiple',-exportselection=>0);
+
+# Add the arrays that we want to display in the list boxes
+$lb1->insert('end', @liddeschsa);
+$lb2->insert('end', @tabledesc);
+
+# Create the buttons
+my $exit = $mw->Button(-text => 'Exit',
+ -command => [$mw => 'destroy']);
+my $send = $mw->Button(-text => 'Send',
+ -command => \&send_button);
+my $show_log = $mw->Button(-text => 'Show Log',
+ -command => \&show_log);
+my $update_list = $mw->Button(-text => 'Update List', -command => \&upd_list);
+# create the label and text box for the last update window
+my $status_box = $mw->Text(-width=>20, -height=>3);
+my $lb_status = $mw->Label(-width=>20, -height=>3,-text=>"Last Ad-Hoc Update:");
+my $last_update = `cat $LOG_DIR/last_nrldb_update.txt`;
+
+$status_box->insert('end',"$last_update");
+
+# Create the GUI using grid to specify the physical locations of the objects
+#$lst_rad_riv->grid(-row=>1, -column=>2, -columnspan=>1);
+#$lst_rad_precip->grid(-row=>1, -column=>3, -columnspan=>1);
+$label1->grid(-row=>1, -column=>1, -columnspan=>3) ;
+$label2->grid(-row=>1, -column=>4) ;
+$lb1->grid(-row=>2, -column=>1, -columnspan=>3, -sticky=>"ew") ;#pack;
+$lb2->grid(-row=>2, -column=>4, -columnspan=>1, -sticky=>"w") ;#pack;
+$lst_lab->grid(-row=>3, -column=>1, -columnspan=>1);
+$misc_ent->grid(-row=>3, -column=>2);
+$lb_status->grid(-row=>4, -column=>1);
+$status_box->grid(-row=>4, -column=>2, -columnspan=>3, -sticky=>"ew");
+$send->grid(-row=>5, -column=>1) ;#pack;
+$show_log->grid(-row=>5,-column=>2);
+$exit->grid(-row=>5, -column=>4) ;#pack;
+
+MainLoop;
+
+# End of main
+#
+#sub upd_list {
+# $mw => 'destroy';
+# my $cmd = "${DIR}/update_nrldb.pl.exp $list_type\n";
+# print "cmd: $cmd\n";
+# system($cmd);
+#}
+
+# The Send button functionality function
+sub send_button {
+ # Get the indices of the selected array items
+ my @LIDindex = $lb1->curselection;
+ my @Tableindex = $lb2->curselection;
+ my $index=1;
+ my $misc_lid = $misc_ent-> get();
+ # build the lists of LIDs and tables
+ $tables = $table_list[$Tableindex[0]];
+ my $numLIDs=@LIDindex;
+ print "numLIDs: $numLIDs\n";
+ my $numTables=@Tableindex;
+ if ($numLIDs > 0){
+ $lids = $lidsend[$LIDindex[0]];
+ while ($index < $numLIDs){
+ $lids .= "," . $lidsend[$LIDindex[$index]];
+ $index++;
+ }
+ $lids .= "," . $misc_lid;
+ } else {
+ $lids=$misc_lid;
+ }
+ $index=1;
+ while ($index < $numTables){
+ $tables .= "," . $table_list[$Tableindex[$index]];
+ $index++;
+ }
+# print "l0: ${lid_list[$LIDindex[0]]} t0: ${table_list[$Tableindex[0]]} lids: $lids tables: $tables\n";
+
+ # Create the call to the script and execute it using system()
+ my $cmd = "${BIN_DIR}/send_nrldb_update.sh -table $tables -lid $lids > ${LOG_DIR}/send_nrldb_update.log\n";
+# print "cmd: $cmd\n";
+ system($cmd);
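+ # With the selections above, $cmd expands to something like (values are
+ # hypothetical):
+ #   ${BIN_DIR}/send_nrldb_update.sh -table crest,floodstmt -lid BRKM2,CBEM2 > ${LOG_DIR}/send_nrldb_update.log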
+
+ # Create a dialog box to inform the user that their data has been sent
+ my $dsend=$mw->Dialog(-title=>'Sent NRLDB Update',-buttons=>['OK']);
+ my $text_field="NRLDB Update Sent for LIDs: $lids \n and tables: $tables\n";
+# my $addbox=$dsend->('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
+ my $box=$dsend->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
+ my $button = $dsend->Show;
+}
+# This subroutine, copied from Mark Fenbers' bless program, takes a db query and returns an array of results
+sub get_results
+{
+ my $qh = shift;
+ my $array = shift;
+ my $record;
+
+#print "qh: $qh\n";
+ if(defined $qh) {
+ if($qh->execute(@_)) {
+ while($record = $qh->fetchrow_arrayref) {
+ foreach (@$record) { $_ = "" if ! defined $_; }
+ push @$array,(join '|',@$record);
+ }
+ } else {
+ warn $DBI::errstr;
+# print $qh->errstr;
+ }
+ } else { warn "unable to prepare query \"$sql\"\n"; }
+}
+
+#This subroutine displays the log from the send script in the form of a dialog box
+sub show_log
+{
+ use Tk::Dialog;
+ my $text_field=`cat ${LOG_DIR}/send_nrldb_update.log`;
+ my $d = $mw->Dialog(-title=>'Show Log',-buttons => ['OK']);
+ my $box=$d->add('Label',-text=>"$text_field")->pack(-side => 'left',-fill => 'both',-expand => 1);
+ my $button = $d->Show;
+# exit;
+}
+
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf
new file mode 100644
index 0000000000..4a3ce4eb68
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb.conf
@@ -0,0 +1,6 @@
+dbhost = "dx1f"
+dbuser = "awips"
+dbpass = ""
+nrldb_host = "165.92.28.1"
+site = "CCC"
+dbname = "hd_ob92ccc"
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo
new file mode 100644
index 0000000000..f76ac5221e
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/nrldb_control_wfo
@@ -0,0 +1,174 @@
+#NRLDB national configuration file
+#
+#
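+# Each [section] below names an IHFS table included in the extract.  A value of
+# "fields = ALL" sends every column of that table, while an explicit
+# comma-separated list (as in [benchmark], [dcp], [telem] and [locdatalimits])
+# restricts the extract to those columns.  Sections that are commented out are
+# skipped.
+#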
+[hsa]
+fields = ALL
+
+[wfo]
+fields = ALL
+
+[state]
+fields = ALL
+
+[counties]
+fields = ALL
+
+[network]
+fields = ALL
+
+[rfc]
+fields = ALL
+
+[timezone]
+fields = ALL
+
+#[admin]
+#fields = ALL
+
+[coopcomms]
+fields = ALL
+
+[cooprecip]
+fields = ALL
+
+[coopspons]
+fields = ALL
+
+[dcpowner]
+fields = ALL
+
+#[eligzon]
+#fields = ALL
+
+[gagemaint]
+fields = ALL
+
+[gageowner]
+fields = ALL
+
+[gagetype]
+fields = ALL
+
+[proximity]
+fields = ALL
+
+[telmtype]
+fields = ALL
+
+[telmowner]
+fields = ALL
+
+[telmpayor]
+fields = ALL
+
+[resowner]
+fields = ALL
+
+[damtypes]
+fields = ALL
+
+[location]
+fields = ALL
+
+[riverstat]
+fields = ALL
+
+[benchmark]
+fields = lid, bnum, elev, remark
+
+[observer]
+fields = ALL
+
+#[zonenum]
+#fields = lid, state, zonenum
+
+[reservoir]
+fields = ALL
+
+[crest]
+fields = ALL
+
+[datum]
+fields = ALL
+
+#[dcp]
+#fields = ALL
+[dcp]
+fields = lid, criteria, owner, goes, rptfreq, rptime, notify, obsvfreq, randrept
+
+[descrip]
+fields = ALL
+
+[flood]
+fields = ALL
+
+[floodcat]
+fields = ALL
+
+[floodstmt]
+fields = ALL
+
+[gage]
+fields = ALL
+
+[lowwater]
+fields = ALL
+
+[pub]
+fields = ALL
+
+[refer]
+fields = ALL
+
+#[telem]
+#fields = ALL
+[telem]
+fields = lid, type, payor, cost, criteria, owner, phone, sensorid, rptfreq, notify, obsvfreq
+
+[rating]
+fields = ALL
+
+[ratingshift]
+fields = ALL
+
+[contacts]
+fields = ALL
+
+[countynum]
+fields = ALL
+
+[unitgraph]
+fields = ALL
+
+[hgstation]
+fields = ALL
+
+#[floodts]
+#fields = ALL
+
+[lwstmt]
+fields = ALL
+
+[rpffcstgroup]
+fields = ALL
+
+[rpffcstpoint]
+fields = ALL
+
+[locdatalimits]
+fields = lid,pe,dur,monthdaystart,monthdayend,gross_range_min,gross_range_max,reason_range_min,reason_range_max,roc_max
+
+[sshpconfig]
+fields = ALL
+
+[shefpe]
+fields = ALL
+
+[shefdur]
+fields = ALL
+
+#[ingestfilter]
+#fields = ALL
+
+[locarea]
+fields = ALL
diff --git a/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt
new file mode 100644
index 0000000000..99f27bad14
--- /dev/null
+++ b/nativeLib/files.native/awipsShare/hydroapps/whfs/local/data/app/nrldb/sed_script.txt
@@ -0,0 +1 @@
+s/'/\\'/g
diff --git a/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c b/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c
index 109d64e57b..cbaa91f41d 100644
--- a/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c
+++ b/nativeLib/rary.ohd.pproc/src/MPEFieldGen/TEXT/main_mpe_fieldgen.c
@@ -174,7 +174,7 @@ void main_mpe_fieldgen_for_calls_from_editor(int num_args, char ** args)
sprintf ( message , "\t\tMPE Precip Processing -- %s\n", strTempTime) ;
printMessage( message, logFile );
- sprintf ( message , "\t\tLast Modification: August 1, 2013 \n") ;
+ sprintf ( message , "\t\tLast Modification: September 19, 2013 \n") ;
printMessage( message, logFile );
sprintf ( message , "\t\t \n") ;
printMessage( message, logFile );
diff --git a/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c b/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c
index 068d0d46c0..55dc06d34b 100644
--- a/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c
+++ b/nativeLib/rary.ohd.pproc/src/MPEGui/TEXT/read_field_data_RFCW.c
@@ -201,21 +201,6 @@ void display_field_data_RFCW ( enum DisplayFieldData display_data ,
idate = date.month*1000000 + date.day*10000 + date.year;
sprintf(fname,"%s/%s%08d%02dz",dirname,cv_use_tmp,idate,date.hour);
}
- else if ( display_data == display_satPrecip )
- {
- iyr = date.year ;
- imo = date.month ;
- ida = date.day ;
- ihr = date.hour ;
- im = 0 ;
- is = 0 ;
- tdiff = -1 ;
- tunit = 2 ;
- TADJ ( & iyr , & imo , & ida , & ihr , & im , & is , & tdiff , & tunit ) ;
- sprintf ( fname , "%s/%4d%02d%02d_%02d00.multi" , dirname , iyr , imo ,
- ida , ihr ) ;
-
- }
else if ( display_data == display_rfcMosaic )
{
sprintf(fname,"%s/%s01%sz",dirname,cv_use_tmp,date.cdate);
@@ -225,16 +210,10 @@ void display_field_data_RFCW ( enum DisplayFieldData display_data ,
sprintf(fname,"%s/%s%sz",dirname,cv_use_tmp,date.cdate);
}
- if ( display_data != display_satPrecip )
- {
- len_fname = strlen ( fname ) ;
- display_field_read_xmrg ( data_array_tmp , fname, addition_flag , rowSize, colSize );
- }
- else
- {
- /* Special logic to process the satellite image. */
- display_field_read_spe ( data_array_tmp , fname, addition_flag ) ;
- }
+ len_fname = strlen ( fname ) ;
+ display_field_read_xmrg ( data_array_tmp , fname, addition_flag , rowSize, colSize );
+
+
}
diff --git a/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c b/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c
index 3f1999801e..b527f55dde 100644
--- a/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c
+++ b/nativeLib/rary.ohd.pproc/src/nc2grib/TEXT/main_nc2grib.c
@@ -1,18 +1,11 @@
-/*
- * main_nc2grib.c
- *
- * Created on: Aug 26, 2011
- * Author: snaples
- */
-
/************************************************************************************
*
* nc2grib - GFE NetCDF to GRIB1 translator
*
* Dave Miller Wyle/IS, OHD/HSEB Version 4.1 August 2009
*
-* This routine reads a NetCDF file created by the GFE command ifpnetCDF and
-* creates a GRIB1 file from the parameter information. This is required for
+* This routine reads a NetCDF file created by the GFE command ifpnetCDF and
+* creates a GRIB1 file from the parameter information. This is required for
* the CHPS/FEWS application as the NetCDF file is not presently CF compliant.
* However, GRIB1 is self-describing and can be translated by the CHPS/FEWS
* application.
@@ -24,13 +17,13 @@
* - Precipitation
*
* Part of the difficulty is in choosing an equivalent GRIB1 parameter to the
-* GFE parameter. In the case of PET, this doesn't exactly match and
+* GFE parameter. In the case of PET, this doesn't exactly match and
* Evaporation was chosed as a close substitute.
-*
-* In addition, since GRIB is particular in several areas, decided to have a
+*
+* In addition, since GRIB is particular in several areas, decided to have a
* lookup table file which will provide some of the values needed to correctly
-* encode into GRIB. In addition, this wasn't done for gribit and one has to
-* modify the code whenever a new process is created. However, reading from
+* encode into GRIB. In addition, this wasn't done for gribit and one has to
+* modify the code whenever a new process is created. However, reading from
* a text file requires no code change as long as the parameters don't change.
* That logic could perhaps change as well.
*
@@ -56,15 +49,16 @@
* Grid 218 for NPVU processing. Had to modify the source in order for it to use 10km grid
* instead of the standard 12 km grid, but that wasn't too difficult.
*
-* The routine reads NetCDF files with multiple time steps and outputs individual GRIB1 files
+* The routine reads NetCDF files with multiple time steps and outputs individual GRIB1 files
* according to their valid times. This can be done for either forecast or observed grids.
-*
+*
* Version 4 allows users to combine all GRIB messages into one file. This becomes useful
-* when dealing with a lot of files for a parameter such as 1 hour QPF or temperature that
-* goes out to 240 hours.
-*
+* when dealing with a lot of files for a parameter such as 1 hour QPF or temperature that
+* goes out to num_hours hours.
+*
* This is still a work in progress and code can always be improved to increase efficiency.
*
+* Oct 2011 - PTilles - added read of new token for defining number of days of data to process
**********************************************************************************************/
#include
#include
@@ -85,12 +79,13 @@
#include "packgrib.h"
#include "getopt.h"
-
#include "cmapf.h"
+#include "version_info.h"
+
#define SECINHR 3600.
-#define PATH_LEN 500
-#define FILE_LEN 300
+#define PATH_LEN 500
+#define FILE_LEN 300
#define BUFFSIZE 1024
#define CMDSIZE 1000
#define COPYSIZE 4200000
@@ -103,7 +98,7 @@
#define UNERR -6 /* unexpected or unhandled input to the program */
#define CDFERR -7 /* error with the NetCDF file */
#define FILEOPERR -8 /* this is a file operations error */
-#define USAGE 1 /* return for usage */
+#define USAGE 1 /* return for usage */
#define MALERR -10 /* memory allocation error */
/* This structure reads lookup values from a file called gfe2grib.txt and compares these
@@ -120,14 +115,14 @@ typedef struct {
int timeunit;
} mygfe2grib;
-int nc_getAppsDefaults(const char* strToken, char* strTokenValue);
+int getAppsDefaults(const char* strToken, char* strTokenValue);
/************************************************************************
* This function loads token value.
* If token is not available, return -1; otherwise return 0.
************************************************************************/
-int nc_getAppsDefaults(const char* strToken, char* strTokenValue)
+int getAppsDefaults(const char* strToken, char* strTokenValue)
{
int tokenLen, tokenValueLen;
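/*
* Minimal usage sketch (illustrative only; the token name and fallback value
* are examples, not new behaviour): callers treat a -1 return as "token not
* set" and fall back to a default, as main() does below for nc2g_num_days.
*
*   char value[128] = {'\0'};
*   int  days = 10;                               /* fallback default */
*   if (getAppsDefaults("nc2g_num_days", value) == 0)
*       days = atoi(value);                       /* token found      */
*/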
@@ -151,37 +146,37 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char *userformat);
int basetime_ansi_to_timet(char *ansi, time_t *timet);
-int nc2grib_main (int argc, char *argv[])
+int main (int argc, char *argv[])
{
extern char *optarg; /* these are for the getopt C library function */
extern int optind, optopt;
+
-
-
+
char fn[PATH_LEN+FILE_LEN]={'\0'}; /* complete input NetCDF path filename */
-
-
- char *infn=NULL; /* input NetCDF filename */
+
+
+ char *infn=NULL; /* input NetCDF filename */
char *ofntemp=NULL; /* output filename template for tailored date/time format */
char *ofn=NULL; /* output GRIB1 filename */
char outfn[PATH_LEN+FILE_LEN]={'\0'}; /* complete output GRIB1 path and filename */
char outfnqpf[PATH_LEN+FILE_LEN]={'\0'}; /* output GRIB1 NPVU filename */
char onegrib[PATH_LEN+FILE_LEN]={'\0'}; /* output path and combine GRIB file if desired */
-
+
char *onegfname=NULL; /* output filename for combined GRIB file if desired */
-
+
char *inpath=NULL; /* input NetCDF path */
-
+
char *gribdir=NULL; /* output GRIB path */
-
+
char command[CMDSIZE]={'\0'}; /* command string called via system */
char fileline[LINE_MAX]={'\0'}; /* holds an input line from gfe2grib.txt file */
char tmpNPVUfn[PATH_LEN+FILE_LEN]={'\0'}; /* temporary holding file for part of GRIB1 message */
char wmohdr1[7]={'\0'}; /* first part of WMO header */
char wmohdr2[5]={'\0'}; /* second part of WMO header */
-
+
char crcrlf[3]={'\r','\r','\n'}; /* needed to separate WMO header from first part of GRIB message */
unsigned char aspace={' '}; /* contains a space character for the header */
unsigned char header[18]={'\0'}; /* full WMO header string */
@@ -191,19 +186,21 @@ int nc2grib_main (int argc, char *argv[])
time_t curtime, basetime_t; /* time_t variables */
char adayhrmin[7]={'\0'}; /* day, hour, minute info attached to WMO header */
-
-
-
+
+
+
int numgfeparms=0;
-
-
-
+
+
+ char cnum[3] = {'\0'};
+ int num_hours = 0; /* (num_days * 24) */
+ /* number of days of data to process - read from token - previously hard coded as 10 */
+ /* default value = 10 - if token not found then default value used */
+ int num_days = 0;
+
int numgfiles=0; /* number of grib files for combining files into one if desired */
- char *gfiles[240]; /* array of char pointers for holding grib filenames if combining files */
-
-
-
- /* for reading the NetCDF file */
+
+ /* for reading the NetCDF file */
int cdfid; /* Netcdf id */
int ndims; /* number of dimensions */
int nvars; /* number of variables */
@@ -211,7 +208,7 @@ int nc2grib_main (int argc, char *argv[])
int recdim;
long start[] = {0, 0, 0}; /* start at first value */
long start1r[] = {0, 0}; /* accounts for netcdf with only 1 record and 2 dimensions of y,x */
-
+
/* flags for different purposes: creating the header, verbose debugging,
only processing 1 valid time in the NetCDF file (debugging)
*/
@@ -231,7 +228,7 @@ int nc2grib_main (int argc, char *argv[])
int errflag=0; /* error flag */
int fflagcntr=0; /* a counter used in conjunction with the format flag */
int onegribflag=0; /* flag for combining all GRIB messages into one file */
-
+
int found=0;
int Yflag=0;
int Mflag=0;
@@ -242,19 +239,19 @@ int nc2grib_main (int argc, char *argv[])
int qpewarn=0;
int formwarn=0;
int onegribwarn=0;
-
+
/* flag used with setting temp grib file to beginning for NPVU processing */
-
+
int firstch=0;
-
+
/* For storing information retrieved from the NetCDF file */
-
+
double stdParallelOne, stdParallelTwo, xlov;
double *latlonLL, *latlonUR, lonOrigin,*domainOrigin, *domainExtent, *latLonOrigin;
int *gridPointLL, *gridPointUR;
double x1, y1, x2, y2, lat1, lon1, lat2, lon2;
nc_type vt_type, dn_type, ll_type, d_type, g_type;
- nc_type cdfvar_type;
+ nc_type cdfvar_type;
int vt_len, ll_len, d_len, g_len;
int cdfvar_id, *gridSize;
int cdfvar_ndims;
@@ -267,17 +264,17 @@ int nc2grib_main (int argc, char *argv[])
char projection[MAX_NC_NAME]={'\0'};
long dim_size;
float *cdfvargrid=NULL; /* this is the main array holding the actual data values */
- float arraysize;
+ float arraysize;
long *validTimes;
char descriptName[MAX_NC_NAME]={'\0'};
-
-
+
+
/* based on the filename, these are used to determine several time strings which
could be coded differently depending on the parameter and whether this is a
forecast or observed (estimated) grid
*/
-
+
char basetime[ANSI_TIME_LEN+1]={'\0'};
char basistime[11]={'\0'}; /* length of this should not change */
/* char *basistime=NULL;*/
@@ -286,56 +283,56 @@ int nc2grib_main (int argc, char *argv[])
char dummy[FILE_LEN]={'\0'};
float timediff;
int timedif_hr, perflag;
-
+
double dxdy; /* holds the DX, DY at standard latitude from a given map projection */
-
+
int dumint[4]; /* dummy int array */
maparam stcprm; /* mapping structure required to hold projection parameters after initialization */
/* part of dmapf-c/cmapf */
-
+
/* several file string variables */
-
+
char file_path[PATH_LEN+FILE_LEN]={'\0'};
char pprocbin[PATH_LEN+FILE_LEN]={'\0'};
char appsdir[PATH_LEN+FILE_LEN]={'\0'};
char process[FILE_LEN]={'\0'};
-
- /*
-
- The fcsth hold the forecast hours determined by differencing the basis time from the
+
+ /*
+
+ The fcsth holds the forecast hours determined by differencing the basis time from the
valid time in the NetCDF file. This is then used to determine the valid time in the GRIB
message of the grid.
*/
-
+
int i, j, m, x, y, status, yr, mon, day, hrmin, sec, fcsth, esth, c;
-
+
/* holds a position value of date/time wildcards in the output filename */
-
+
size_t psn=0;
char * valptr=NULL;
-
+
/* these are a couple of check flags: missing data and all zeros. The missing data will
cause the program to return with an error. The zeros is a warning but this could be
correct in the case of QPE or QPF.
*/
int mischek=0;
int zerochek=0;
-
-
+
+
/* declare structure variable */
-
+
mygfe2grib gfe2grib;
-
+
/* file and directory status structure variable */
-
+
struct stat st;
-
+
FILE *fptrqpf, *fptr, *fp, *tmpfptr, *onegfptr; /* file pointers */
+
-
-
+
/**************************GRIB PARAMETERS for packgrib**********************/
int grib_lbl[43]; /* holds the values for the GRIB meta data */
@@ -350,35 +347,35 @@ int nc2grib_main (int argc, char *argv[])
size_t length;
size_t idim;
-
- output_buffer = (size_t *) malloc (sizeof(size_t)*odim); /* output buffer used when writing GRIB message */
-
+
+ output_buffer = (size_t *) malloc (sizeof(size_t)*odim); /* output buffer used when writing GRIB message */
+
/* output_buffer = (int *) malloc (sizeof(int)*odim); /* output buffer used when writing GRIB message */
-
+
if(output_buffer==NULL)
{
printf(" ERROR: Something went wrong with memory allocation for the GRIB output buffer....exiting\n");
return MALERR;
}
+
+
+/************** start main routine ************************************************/
+
-
-/************** start main routine ************************************************/
-
-
-
+
/* parse command line arguments */
- while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1")) != -1) {
-
+ while ((c = getopt(argc, argv, ":n:i:t:o::b:p:g:Nfrqhv1V")) != -1) {
+
switch (c) {
-
+
case 'i': /* input filename option */
-
+
if (iflag)
errflag++;
- else
+ else
{
iflag++;
if(optarg!=NULL)
@@ -396,9 +393,9 @@ int nc2grib_main (int argc, char *argv[])
printf(" ERROR: Something went wrong with memory allocation for the input file name....exiting\n");
return MALERR;
}
-
+
strcpy(infn, optarg);
-
+
*(infn+strlen(optarg))='\0';
}
else
@@ -417,10 +414,10 @@ int nc2grib_main (int argc, char *argv[])
case 'o': /* output filename option */
if (oflag)
errflag++;
- else
+ else
{
oflag++;
-
+
if(argv[optind]!=NULL && *(argv[optind])!='-') /* have to process a bit differently as this option
has an option argument */
{
@@ -449,12 +446,12 @@ int nc2grib_main (int argc, char *argv[])
printf(" ERROR: Something went wrong with memory allocation for the temp output file name....exiting\n");
return MALERR;
}
-
+
/* copy to both because will use in conjunction with -f format flag if specified */
-
- strcpy(ofntemp,argv[optind]);
+
+ strcpy(ofntemp,argv[optind]);
strcpy(ofn,argv[optind]);
-
+
}
else
{
@@ -471,13 +468,13 @@ int nc2grib_main (int argc, char *argv[])
}
oflag=0;
}
-
+
}
break;
case 't': /* output path option */
if (tflag)
errflag++;
- else
+ else
{
tflag++;
if(optarg!=NULL)
@@ -489,13 +486,13 @@ int nc2grib_main (int argc, char *argv[])
free(gribdir);
gribdir=NULL;
}
- gribdir=(char *) malloc(sizeof(char)*(strlen(optarg)+1));
+ gribdir=(char *) malloc(sizeof(char)*(strlen(optarg)+1));
if(gribdir==NULL)
{
printf(" ERROR: Something went wrong with memory allocation for the grib directory name....exiting\n");
return MALERR;
}
-
+
strcpy(gribdir,optarg);
*(gribdir+strlen(optarg))='\0';
}
@@ -505,7 +502,7 @@ int nc2grib_main (int argc, char *argv[])
errflag++;
optind--;
}
-
+
}
else
errflag++;
@@ -514,26 +511,26 @@ int nc2grib_main (int argc, char *argv[])
case 'n': /* input path option */
if (nflag)
errflag++;
- else
+ else
{
nflag++;
if(optarg!=NULL)
{
-
+
if(*optarg!='-')
- {
+ {
if(inpath!=NULL)
{
free(inpath);
inpath=NULL;
- }
+ }
inpath=(char *) malloc(sizeof(char)*(strlen(optarg)+1));
if(inpath==NULL)
{
printf(" ERROR: Something went wrong with memory allocation for the input directory name....exiting\n");
return MALERR;
}
-
+
strcpy(inpath,optarg);
*(inpath+strlen(optarg))='\0';
}
@@ -543,19 +540,19 @@ int nc2grib_main (int argc, char *argv[])
errflag++;
optind--;
}
-
-
+
+
}
else
errflag++;
-
+
}
break;
case 'p': /* GFE process id option */
if (pflag)
errflag++;
- else
+ else
{
pflag++;
if(optarg!=NULL)
@@ -564,28 +561,28 @@ int nc2grib_main (int argc, char *argv[])
if(*optarg!='-')
{
-
+
strcpy(process,optarg);
}
else
{
printf("\n Option -%c requires a value\n", c);
-
+
errflag++;
optind--;
}
-
+
}
else
errflag++;
-
+
}
break;
case 'N': /* flag to process NPVU QPF files */
if (headflag)
errflag++;
- else
+ else
headflag++;
break;
case 'f': /* format flag option */
@@ -594,7 +591,7 @@ int nc2grib_main (int argc, char *argv[])
else
{
fflag++;
-
+
}
break;
case 'q': /* QPE flag option */
@@ -603,7 +600,7 @@ int nc2grib_main (int argc, char *argv[])
else
{
qflag++;
-
+
}
break;
case 'r': /* estimated flag option */
@@ -612,23 +609,23 @@ int nc2grib_main (int argc, char *argv[])
else
{
rflag++;
-
+
}
break;
-
+
case 'b': /* basis time flag option */
if (bflag)
errflag++;
else
- {
+ {
bflag++;
if(optarg!=NULL)
{
if(*optarg!='-')
{
-
-
+
+
strcpy(basistime,optarg);
}
@@ -638,38 +635,38 @@ int nc2grib_main (int argc, char *argv[])
errflag++;
optind--;
}
-
+
}
else
errflag++;
}
break;
-
+
case 'g': /* combined GRIB message file option */
if (onegribflag)
errflag++;
- else
+ else
{
onegribflag++;
if(optarg!=NULL)
{
-
+
if(*optarg!='-')
- {
+ {
if(onegfname!=NULL)
{
free(onegfname);
onegfname=NULL;
- }
+ }
onegfname=(char *) malloc(sizeof(char)*(strlen(optarg)+1));
if(onegfname==NULL)
{
printf(" ERROR: Something went wrong with memory allocation for the input directory name....exiting\n");
return MALERR;
}
-
+
strcpy(onegfname,optarg);
*(onegfname+strlen(optarg))='\0';
}
@@ -679,27 +676,31 @@ int nc2grib_main (int argc, char *argv[])
errflag++;
optind--;
}
-
-
+
+
}
else
errflag++;
-
+
}
break;
-
+
case 'h': /* display help */
helpflag++;
break;
case 'v': /* turn on verbose debugging */
if (debugflag)
errflag++;
- else
+ else
debugflag++;
break;
case '1': /* process only one record of NetCDF, useful for debugging */
time1flag++;
break;
+ case 'V':
+ printf("version number = %s%s\n",VERSION_NAME,VERSION_NUMBER);
+ exit(0);
+ break;
case ':': /* for options that need an operand */
if(optopt != 'o')
{
@@ -723,18 +724,19 @@ int nc2grib_main (int argc, char *argv[])
}
}
break;
-
+
case '?':
printf("Unrecognized program command line option: -%c\n", optopt);
errflag++;
-
-
-
+
+
+
}
- }
-
-
- if (errflag || helpflag || argc==1 || ( iflag==0 || pflag==0) )
+
+ } /* while c = getopt */
+
+
+ if (errflag || helpflag || argc==1 || ( iflag==0 || pflag==0) )
{
if ( iflag==0 || pflag==0)
{
@@ -742,40 +744,58 @@ int nc2grib_main (int argc, char *argv[])
"was missing when running nc2grib. These must be specified as inputs to nc2grib at a minimum \n" \
"in order for it to run. Check usage of nc2grib below.\n");
}
- status=display_usage();
+ status=display_usage();
return USAGE;
}
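/*
* Hypothetical invocation (for orientation only; the paths, process id and
* date are made up; display_usage() remains the authoritative synopsis).
* The letters correspond to the getopt string above:
*
*   nc2grib -p QPF -n /data/netcdf -i qpf.cdf -t /data/grib -b 2011100112 -v
*
* -p names the gfe2grib.txt process id, -n/-i locate the input NetCDF file,
* -t sets the output GRIB directory, -b supplies the basis time for forecast
* grids, and -v turns on the verbose debug output.
*/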
+
+/* Print CHPS build number */
+ printf("version number = %s%s\n",VERSION_NAME,VERSION_NUMBER);
+
+ if(getAppsDefaults("nc2g_num_days",cnum) == -1)
+ {
+ num_days = 10;
+ }
+ else
+ {
+ num_days = atoi(cnum);
+ }
- if(nc_getAppsDefaults("nc2g_app_dir",appsdir) == -1)
+ num_hours = num_days * 24;
+ //numgfiles = num_hours;
+ char *gfiles[num_hours]; /* array of char pointers for holding grib filenames if combining files */
+
+ printf("\n number of days to process = %d \n", num_days);
+
+ if(getAppsDefaults("nc2g_app_dir",appsdir) == -1)
{
fprintf(stderr," ERROR: Invalid token value for token \"nc2g_app_dir\".\n\t Program exit.\n");
status=display_usage();
return APSDEFERR;
}
-
+
sprintf(file_path,"%s/%s",appsdir,"gfe2grib.txt");
-
+
if((fp = fopen(file_path, "r")) == NULL)
{
printf (" ERROR: cannot open GFE NetCDF parameter input file: %s\n\tProgram exit.", file_path) ;
return OPENERR;
}
-
-
+
+
if(gribdir==NULL)
{
gribdir=(char *) malloc(sizeof(char)*(PATH_LEN+1));
-
+
if (gribdir==NULL)
{
printf(" ERROR: Something went wrong with memory allocation for the grib output directory....exiting\n");
return MALERR;
}
-
+
*(gribdir+PATH_LEN)='\0';
-
- if(nc_getAppsDefaults("fewsgrib_dir",gribdir) == -1)
+
+ if(getAppsDefaults("fewsgrib_dir",gribdir) == -1)
{
printf(" ERROR: Invalid token value for token \"fewsgrib_dir\".\n\t Program exit.");
status=display_usage();
@@ -786,50 +806,58 @@ int nc2grib_main (int argc, char *argv[])
printf("\n Debug option on...GRIB directory not specified. Will save output GRIB files to:\n" \
" %s \n",gribdir);
}
-
+
}
else if (debugflag>0)
{
printf("\n Debug option on...GRIB directory specified as %s\n",gribdir);
}
+
-
-
+
/**************************************************************************/
/* debugflag > 0; debug option is on */
-
+
if(debugflag>0)
printf("\n Debug option on...reading from GFE to GRIB configuation file:\n" \
- " %s\n\n",file_path);
-
-/**************************************************************************/
+ " %s\n\n",file_path);
+
+/**************************************************************************/
+
+ /* Read one line at a time from the gfe2grib.txt configuration file. It is
+ * an arguable point as to whether this is necessary or even adds to the
+ * functionality of the program. I believe all this information can be
+ * derived directly from the NetCDF file. djsiii 13 Sep 2012
+ */
+ /* NOTE - THERE IS A PROBLEM HERE WITH COMMENTS - THIS CODE ONLY CHECKS FOR
+ * '#' CHARACTERS AT THE BEGINNING OF THE LINE.
+ */
while (fgets(fileline, LINE_MAX, fp) != NULL)
{
-
+
if(fileline[0] != '#') /* check for comments */
{
-
+
sscanf(fileline,"%s%s%d%d%d%d%d",gfe2grib.process, gfe2grib.gfename, &gfe2grib.processid,
&gfe2grib.gribnum,&gfe2grib.decscale, &gfe2grib.timerange, &gfe2grib.timeunit);
if(debugflag>0)
printf(" DEBUG: Read in from gfe2grib.txt %s %s %d %d %d %d %d \n",gfe2grib.process, gfe2grib.gfename, gfe2grib.processid,
- gfe2grib.gribnum,gfe2grib.decscale, gfe2grib.timerange, gfe2grib.timeunit);
-
-
+ gfe2grib.gribnum,gfe2grib.decscale, gfe2grib.timerange, gfe2grib.timeunit);
+
+
/* if (strstr(gfe2grib.process, process)!=NULL) */ /* found a problem using this. try next if instead */
if (!(strcmp(gfe2grib.process, process)))
{
-
found = 1;
break;
}
- }
- }
-
+ } /* If not a comment */
+ } /* While we haven't reached the end of the gfe2grib.txt file */
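/*
* Illustrative gfe2grib.txt entry (the numeric values are hypothetical); the
* sscanf() above expects, in order:
*   process  gfename  processid  gribnum  decscale  timerange  timeunit
* e.g.
*   QPF      QPF      180        61       1         4          1
* which would map the -p QPF process id to GRIB parameter 61 (precipitation)
* with an accumulation time range.
*/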
+
if (found==0)
{
printf(" Could not match input process ID with those in gfe2grib.txt file\n" \
@@ -839,27 +867,26 @@ int nc2grib_main (int argc, char *argv[])
else if(debugflag)
{
printf(" DEBUG: Match found between input process ID and value stored in gfe2grib.txt file\n" \
- " Process ID = %s\n",process);
+ " Process ID = %s\n",process);
}
- fclose(fp);
+ fclose(fp);
/* open the Netcdf file*/
-
+
if(inpath==NULL)
{
inpath=(char *) malloc(sizeof(char)*(FILE_LEN+1));
-
-
+
if(inpath==NULL)
{
printf(" ERROR: Something went wrong with memory allocation for the NetCDF input directory....exiting\n");
return MALERR;
}
-
+
*(inpath+FILE_LEN)='\0';
-
- if(nc_getAppsDefaults("netcdf_dir",inpath) == -1)
+
+ if(getAppsDefaults("netcdf_dir",inpath) == -1)
{
printf(" ERROR: Invalid token value for token \"netcdf_dir\".\n\t Program exit.");
return APSDEFERR;
@@ -868,24 +895,25 @@ int nc2grib_main (int argc, char *argv[])
{
printf(" Default path for the input NetCDF file not specified...Will use the following:\n" \
" %s\n",inpath);
- }
- }
-/***************************************************************************/
+ }
+ } /* if inpath is NULL */
+
+/***************************************************************************/
else if(debugflag)
printf(" Will attempt to read NetCDF file from this path:\n" \
" %s\n\n",inpath);
-
-/**************************************************************************/
+
+/**************************************************************************/
if (stat(inpath,&st) != 0)
{
printf(" ERROR: The NetCDF input path does not exist. Please correct this error and try again.\n");
return FILEERR;
}
-
+
sprintf(fn,"%s/%s",inpath,infn);
-
- cdfid=ncopen(fn,NC_NOWRITE);
-
+
+ cdfid = ncopen(fn,NC_NOWRITE);
+
if (cdfid==-1)
{
printf("\n ERROR: Could not open the netcdf file: %s\n", fn);
@@ -895,14 +923,14 @@ int nc2grib_main (int argc, char *argv[])
{
printf ("\n Netcdf file %s was opened successfully.\n\n",fn);
}
-
+
/* Inquire about the Netcdf file: No.of dimensions, No.of variables,
No. of global attributes etc.*/
-
+
ncinquire (cdfid, &ndims, &nvars, &ngatts, &recdim);
-/*************************************************************************/
-/* debug */
-
+/*************************************************************************/
+/* debug */
+
if (debugflag >0)
{
printf("\n Debug option on. Debug info from reading the netcdf file follows:\n\n");
@@ -911,22 +939,22 @@ if (debugflag >0)
printf (" Number of global attributes for this netcdf file is: %d\n",ngatts);
}
/*************************************************************************/
-
-
- cdfvar_id = 0; /* this should not change for this application as the first variable will be the one
- that contains the QPF, Temp, etc. */
-
+
+
+ cdfvar_id = 0; /* this should not change for this application as the first variable will be the one
+ that contains the QPF, Temp, etc. */
+
ncvarinq (cdfid, cdfvar_id, varname, &cdfvar_type, &cdfvar_ndims, cdfvar_dims, &cdfvar_natts);
-
+
printf ("\n NetCDF variable name = %s\n",varname);
-/***********************************************************************/
-if (debugflag>0)
-{
+/***********************************************************************/
+if (debugflag>0)
+{
printf (" Number of %s dimensions - %d\n",varname, cdfvar_ndims);
printf (" Number of %s attributes - %d\n\n",varname, cdfvar_natts);
}
-/**********************************************************************/
+/**********************************************************************/
if (strstr(varname,gfe2grib.gfename)==NULL)
{
printf("ERROR: The parameter name in the GFE NetCDF file, %s, doe not match the one\n" \
@@ -934,12 +962,12 @@ if (debugflag>0)
"In gfe2grib.txt process ID %s is associated with GFE parameter name %s.\n" \
"Please specify the correct process ID and try again\n\n",varname,gfe2grib.process,gfe2grib.gfename);
return CDFERR;
- }
+ }
if(cdfvar_ndims==3) /* in some cases, this may not be true if file is produced from MPE/DQC */
- {
- for (i=0; i0)
return CDFERR;
}
-/*************************************************************************/
+/*************************************************************************/
if (debugflag >0)
{
printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size);
}
/*************************************************************************/
-
+
}
}
else if (cdfvar_ndims==2)
{
-
-
+
+
for (i=0; i0)
{
printf(" DEBUG: cdfvar dimension %d: name=%s size=%ld\n",i+1,dimname,dim_size);
}
/*************************************************************************/
-
+
}
}
- else
+ else
{
printf("\n nc2grib is not coded to handle %d number of dimensions for variable %s.\n" \
" Please ensure the NetCDF file is created properly for two or three dimensions, where\n" \
" two dimensions indicates only 1 record of the variable and three dimensions allow\n" \
" the NetCDF file to contain multiple records.\n",cdfvar_ndims,varname);
return CDFERR;
- }
-
+ }
+
/* get variable attributes */
-
+
arraysize = x * y;
-
+
cdfvargrid = (float *) malloc (sizeof(float)*arraysize);
-
+
long count[]={1,y,x};
long count1r[]={y,x};
ncattinq(cdfid,cdfvar_id,"validTimes",&vt_type,&vt_len);
-
+
validTimes = (long *) malloc(vt_len * nctypelen(vt_type));
-
+
ncattget(cdfid, cdfvar_id, "validTimes", validTimes);
-
+
ncattget(cdfid, cdfvar_id, "descriptiveName", descriptName);
-
+
ncattget(cdfid, cdfvar_id, "siteID", siteID);
-
+
ncattget(cdfid, cdfvar_id, "units", cdfunits);
-
+
ncattget(cdfid, cdfvar_id, "projectionType", projection);
-
+
ncattinq(cdfid,cdfvar_id,"latLonLL",&ll_type,&ll_len);
-
- latlonLL = (double *) malloc(ll_len * nctypelen(ll_type));
-
+
+ latlonLL = (double *) malloc(ll_len * nctypelen(ll_type));
+
ncattget(cdfid, cdfvar_id, "latLonLL", (void *) latlonLL);
-
+
latlonUR = (double *) malloc(ll_len * nctypelen(ll_type));
ncattget(cdfid, cdfvar_id, "latLonUR", (void *) latlonUR);
-
+
ncattinq(cdfid,cdfvar_id,"domainOrigin",&d_type,&d_len);
+
+ domainOrigin = (double *) malloc(d_len * nctypelen(d_type));
- domainOrigin = (double *) malloc(d_len * nctypelen(d_type));
-
- ncattget(cdfid, cdfvar_id, "domainOrigin", (void *) domainOrigin);
-
+ ncattget(cdfid, cdfvar_id, "domainOrigin", (void *) domainOrigin);
+
ncattinq(cdfid,cdfvar_id,"domainExtent",&d_type,&d_len);
-
- domainExtent = (double *) malloc(d_len * nctypelen(d_type));
+
+ domainExtent = (double *) malloc(d_len * nctypelen(d_type));
ncattget(cdfid, cdfvar_id, "domainExtent", (void *) domainExtent);
-
+
ncattinq(cdfid,cdfvar_id,"gridSize",&g_type,&g_len);
-
+
gridSize = (int *) malloc(g_len * nctypelen(g_type));
-
- ncattget(cdfid, cdfvar_id, "gridSize", (void *) gridSize);
+
+ ncattget(cdfid, cdfvar_id, "gridSize", (void *) gridSize);
ncattinq(cdfid,cdfvar_id,"gridPointLL",&g_type,&g_len);
-
+
gridPointLL = (int *) malloc(g_len * nctypelen(g_type));
-
- ncattget(cdfid, cdfvar_id, "gridPointLL", (void *) gridPointLL);
+
+ ncattget(cdfid, cdfvar_id, "gridPointLL", (void *) gridPointLL);
ncattinq(cdfid,cdfvar_id,"gridPointUR",&g_type,&g_len);
-
+
gridPointUR = (int *) malloc(g_len * nctypelen(g_type));
-
- ncattget(cdfid, cdfvar_id, "gridPointUR", (void *) gridPointUR);
-
+
+ ncattget(cdfid, cdfvar_id, "gridPointUR", (void *) gridPointUR);
+
/* initialize the array to missing value */
-
+
for (i=0;i0)
{
-
+
printf(" DEBUG: siteID = %s\n",siteID);
printf(" DEBUG: number of valid times = %d type = %d\n",vt_len, vt_type);
printf(" DEBUG: descriptName = %s\n",descriptName);
- printf(" DEBUG: projection = %s\n",projection);
-
+ printf(" DEBUG: projection = %s\n",projection);
+
for (i=0; i0)
/* for averaged grids but not used here */
grib_lbl[19]=0;
grib_lbl[20]=0;
-
+
/* parameter code, time unit, time range, decimal scale vary per parameter
and are configurable in the gfe2grib.txt file
*/
grib_lbl[7]=gfe2grib.gribnum;
-
+
grib_lbl[15]=gfe2grib.timeunit;
-
+
grib_lbl[18]=gfe2grib.timerange;
-
+
grib_lbl[22]=gfe2grib.decscale;
-
+
/* level type */
grib_lbl[8]=1; /* surface */
if (grib_lbl[7]==7)
grib_lbl[8]=4; /* freezing level surface */
-
+
/* level */
grib_lbl[9]=0;
grib_lbl[10]=0;
-
+
/* sub-center id */
- if ( strstr(siteID,"TUA")!=NULL )
+ if ( strstr(siteID,"TUA")!=NULL )
{
grib_lbl[21] = 150;
-
+
strcpy(wmohdr2,"KTUA");
}
- else if ( strstr(siteID,"ACR")!=NULL )
- {
+ else if ( strstr(siteID,"ACR")!=NULL )
+ {
grib_lbl[21] = 151;
-
+
strcpy(wmohdr2,"PACR");
}
- else if ( strstr(siteID,"STR")!=NULL )
+ else if ( strstr(siteID,"STR")!=NULL )
{
grib_lbl[21] = 152;
-
+
strcpy(wmohdr2,"KSTR");
}
- else if ( strstr(siteID,"RSA")!=NULL )
+ else if ( strstr(siteID,"RSA")!=NULL )
{
grib_lbl[21] = 153;
-
+
strcpy(wmohdr2,"KRSA");
}
- else if ( strstr(siteID,"ORN")!=NULL )
+ else if ( strstr(siteID,"ORN")!=NULL )
{
grib_lbl[21] = 154;
-
+
strcpy(wmohdr2,"KORN");
}
- else if ( strstr(siteID,"RHA")!=NULL )
+ else if ( strstr(siteID,"RHA")!=NULL )
{
grib_lbl[21] = 155;
strcpy(wmohdr2,"KRHA");
}
- else if ( strstr(siteID,"KRF")!=NULL )
+ else if ( strstr(siteID,"KRF")!=NULL )
{
grib_lbl[21] = 156;
strcpy(wmohdr2,"KKRF");
}
- else if ( strstr(siteID,"MSR")!=NULL )
+ else if ( strstr(siteID,"MSR")!=NULL )
{
grib_lbl[21] = 157;
strcpy(wmohdr2,"KMSR");
}
- else if ( strstr(siteID,"TAR")!=NULL )
+ else if ( strstr(siteID,"TAR")!=NULL )
{
grib_lbl[21] = 158;
strcpy(wmohdr2,"KTAR");
}
- else if ( strstr(siteID,"PTR")!=NULL )
+ else if ( strstr(siteID,"PTR")!=NULL )
{
grib_lbl[21] = 159;
strcpy(wmohdr2,"KPTR");
}
- else if ( strstr(siteID,"TIR")!=NULL )
+ else if ( strstr(siteID,"TIR")!=NULL )
{
grib_lbl[21] = 160;
strcpy(wmohdr2,"KTIR");
}
- else if ( strstr(siteID,"ALR")!=NULL )
+ else if ( strstr(siteID,"ALR")!=NULL )
{
grib_lbl[21] = 161;
strcpy(wmohdr2,"KALR");
}
- else if ( strstr(siteID,"FWR")!=NULL )
+ else if ( strstr(siteID,"FWR")!=NULL )
{
grib_lbl[21] = 162;
-
+
strcpy(wmohdr2,"KFWR");
}
else
- {
+ {
printf(" Unknown site ID %s for this application...Exiting\n",siteID);
return UNERR;
}
-
-
+
+
/* binary data section flag */
grib_lbl[23]=0 ;
-
+
/* packing width of data points */
grib_lbl[24]=16; /* original was 16 in the example 4 in gribit */
-
+
/* initialized but ignored in grib message */
grib_lbl[26]=0;
- grib_lbl[27]=0;
-
+ grib_lbl[27]=0;
+
/* length of GDS */
if (strstr(projection,"POLAR")!=NULL)
{
-
-
+
+
grib_lbl[25]=32; /* polar stereographic and lat/long, 42 for Lambert */
-
-
+
+
/* grid (data representation) type, polar stereographic */
grib_lbl[28]=5;
grib_lbl[29]=(int) x;
grib_lbl[30]=(int) y;
-
+
/* next for initialized but not used */
grib_lbl[39]=0;
grib_lbl[40]=0;
- grib_lbl[41]=0;
- grib_lbl[42]=0;
-
- ncattget(cdfid, cdfvar_id, "lonOrigin", &lonOrigin);
-
- grib_lbl[34]=lonOrigin*1000.; /* longitude of grid point orientation */
-
+ grib_lbl[41]=0;
+ grib_lbl[42]=0;
+
+ ncattget(cdfid, cdfvar_id, "lonOrigin", &lonOrigin);
+
+ grib_lbl[34]=lonOrigin*1000.; /* longitude of grid point orientation */
+
}
else if (strstr(projection,"LAMBERT")!=NULL)
{
-
+
grib_lbl[25]=42; /* Lambert Conformal, 32 for polar */
-
+
/* grid (data representation) type, lambert conformal */
-
+
grib_lbl[28]=3;
-
+
grib_lbl[29]=(int) x;
grib_lbl[30]=(int) y;
ncattinq(cdfid,cdfvar_id,"latLonOrigin",&ll_type,&ll_len);
-
+
latLonOrigin = (double *) malloc(ll_len * nctypelen(ll_type));
-
+
ncattget(cdfid, cdfvar_id, "latLonOrigin", latLonOrigin);
-
- grib_lbl[34]=(*latLonOrigin)*1000.;
-
- ncattget(cdfid, cdfvar_id, "stdParallelOne", &stdParallelOne);
- ncattget(cdfid, cdfvar_id, "stdParallelTwo", &stdParallelTwo);
-
-
+
+ grib_lbl[34]=(*latLonOrigin)*1000.;
+
+ ncattget(cdfid, cdfvar_id, "stdParallelOne", &stdParallelOne);
+ ncattget(cdfid, cdfvar_id, "stdParallelTwo", &stdParallelTwo);
+
+
grib_lbl[39]=stdParallelOne*1000;
grib_lbl[40]=stdParallelTwo*1000;
-
- grib_lbl[41]=0;
- grib_lbl[42]=0;
-
-
+
+ grib_lbl[41]=0;
+ grib_lbl[42]=0;
+
+
}
else
{
printf(" Unknown projection read from netcdf...Exiting");
return CDFERR;
-
+
/* might account for this as this is a lat,lon grid */
- /* comment out for this version */
-
+ /* comment out for this version */
+
/* latitude/longitude grid
grib_lbl(30)=idim
grib_lbl(31)=jdim
@@ -1304,35 +1332,35 @@ if (debugflag >0)
grib_lbl(39)=64
*/
}
-
+
/* resolution component flags */
-
+
grib_lbl[33]=8;
/* must find the grid map parameters and then the dx, dy resolution */
/* normally, these are the same for polar stereographic and even lambert conformal,
but not necessarily
*/
-
+
x1=y1=x2=y2=lat1=lon1=lat2=lon2=0.; /* initialize the end points of the local grid */
-
+
/* Lower left corner of the main projected grid */
-
+
x1=(double) *gridPointLL;
y1=(double) (*(gridPointLL+1));
lon1= (*latlonLL);
lat1= (*(latlonLL+1));
-
+
/* upper right corner of the main projected grid */
-
+
x2=(double) *gridPointUR;
y2=(double) (*(gridPointUR+1));
lon2= (*latlonUR);
lat2= (*(latlonUR+1));
/* check if polar stereographic or lambert conformal to set map parameters correctly */
-
+
if(grib_lbl[25]==32)
stlmbr(&stcprm,90.,lonOrigin);
else if(grib_lbl[25]==42)
@@ -1340,129 +1368,129 @@ if (debugflag >0)
xlov=*latLonOrigin;
stlmbr(&stcprm,eqvlat(stdParallelOne,stdParallelTwo),xlov);
}
-
- /* set Earth radius */
-
+
+ /* set Earth radius */
+
cstrad(&stcprm,6371.2); /* radius of Earth used by NCEP */
-
-
+
+
stcm2p(&stcprm,x1,y1,lat1,lon1,x2,y2,lat2,lon2); /* find map parameters based on known lat/lons */
-
-
+
+
/* find DX DY values, should be identical for the projections for this app */
-
+
if(grib_lbl[25]==32)
dxdy = cgszll(&stcprm, 60., lonOrigin);
-
+
else if(grib_lbl[25]==42)
dxdy = cgszll(&stcprm, eqvlat(stdParallelOne,stdParallelTwo), xlov);
-
+
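/*
* Summary of the dmapf-c (cmapf) call sequence used above, polar
* stereographic case (the Lambert case differs only in the stlmbr()/cgszll()
* latitude arguments):
*
*   stlmbr(&stcprm, 90., lonOrigin);                    set up the projection
*   cstrad(&stcprm, 6371.2);                            NCEP Earth radius, km
*   stcm2p(&stcprm, x1,y1,lat1,lon1, x2,y2,lat2,lon2);  fit to the two known
*                                                       corner points
*   dxdy = cgszll(&stcprm, 60., lonOrigin);             grid length (km) at
*                                                       the standard latitude
*
* cxy2ll() below then converts the local domain origin back to the lat/lon
* carried in the GDS.
*/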
/*************************************************************************/
if (debugflag >0)
{
-
+
/* debug only */
-
+
printf(" DEBUG: dxdy is %9.3f\n",dxdy);
-
+
printf(" DEBUG: Crosscheck grid lower left and upper right info\n");
-
+
printf(" DEBUG: LL X=%6.0f, LL Y=%6.0f, UR X=%6.0f, UR Y=%6.0f\n" \
- " DEBUG: LL Lat=%f, LL Lon=%f, UR Lat=%f, UR Lon=%f\n",
+ " DEBUG: LL Lat=%f, LL Lon=%f, UR Lat=%f, UR Lon=%f\n",
x1,y1,x2,y2,lat1,lon1,lat2,lon2);
-
-
- printf(" DEBUG: longitude at origin = %d\n",grib_lbl[34]/1000);
-
-
-}
+
+
+ printf(" DEBUG: longitude at origin = %d\n",grib_lbl[34]/1000);
+
+
+}
/*************************************************************************/
-
+
dxdy=ceil(dxdy*1000);
-
+
int dx = dxdy;
int dy = dxdy;
-
+
/* in GFE, the gridsize should equal the extents if using the standard grid resolutions.
- If not, the site has changed resolutions and this must be determined
+ If not, the site has changed resolutions and this must be determined
*/
-
+
if ((int) y != (int) (*(domainExtent+1)) || (int) x != (int) (*domainExtent))
{
/* first calculate x */
-
+
/* this formula is in the GFE online help - Adjusting the Grid Resolution in localConfig.py */
-
-
+
+
dx = dxdy * ((*domainExtent) / ( x -1));
-
+
dy = dxdy * ((*(domainExtent+1)) / ( y -1));
-
+
}
-
+
/* note that this may cause problems for places where dx != dy but they are still using polar stereographic
and it usually assumes these are the same
*/
-
+
grib_lbl[35]=dx;
grib_lbl[36]=dy;
- /* now for the local grid (i.e grid 255 in GRIB), will need to get the lower left lat, lon and
+ /* now for the local grid (i.e grid 255 in GRIB), will need to get the lower left lat, lon and
will use the cxy2ll command here for the domain with origin values of x and y */
-
+
x=*domainOrigin;
y=*(domainOrigin+1);
-
-
+
+
cxy2ll(&stcprm,x,y,&lat1,&lon1); /* Find lat lon */
-
-
- grib_lbl[31]=(lat1)*1000;
- grib_lbl[32]=(lon1)*1000;
-
+
+
+ grib_lbl[31]=(lat1)*1000;
+ grib_lbl[32]=(lon1)*1000;
+
firstLon=grib_lbl[32]; /* must preserve because the packer changes the sign */
-
+
/*****************debug*********************/
if (debugflag>0)
{
printf(" DEBUG: dx = %d dy = %d x = %d extent x = %f y = %d extent y = %f \n",dx,dy,x,*domainExtent, y,(*(domainExtent+1)));
- printf(" DEBUG: for local domain x = %d and y = %d, the corresponding lat = %f lon = %f\n",(int) x, (int) y, lat1, lon1);
-}
-/******************************************/
-
-
+ printf(" DEBUG: for local domain x = %d and y = %d, the corresponding lat = %f lon = %f\n",(int) x, (int) y, lat1, lon1);
+}
+/******************************************/
+
+
grib_lbl[37]=0;
-
+
/* scanning mode flag */
-
+
grib_lbl[38]=64;
-
+
/* in the original packgrib_.c documentation, it was thought that this pds_ext could be anything
the user wanted. However, this area of the GRIB message actually is used by NCEP to include
ensemble forecast information for GRIB1 messages. Therefore this should be set to the NULL
string unless one really means to include ensemble information here.
*/
-
+
strcpy(pds_ext,"");
-
+
iplen=strlen(pds_ext);
/*************************************************************************/
if (debugflag >0)
{
-
+
/* debug only */
-
+
printf(" DEBUG: dxdy is %6.0f\n",dxdy);
-
+
printf(" DEBUG: LL local domain lat=%f lon=%f\n",lat1,lon1);
+
-
-
-
-}
+
+
+}
/************************************************************************/
@@ -1470,122 +1498,122 @@ if (debugflag >0)
/* if(strrchr(fn,'/') != NULL)
slashpos=strrchr(fn,'/') - fn;
-*/
+*/
/* If this is a NetCDF file containing forecast grids, the -b switch with a basis time
- has to be included. Split the basis time so it can be converted to a time_t
+ has to be included. Split the basis time so it can be converted to a time_t
variable and forecast hours can be determined for GRIB P1 and P2 calculation.
*/
if(bflag)
{
-
+
for (i=0;i<4;i++)
{
dummy[i]=basistime[i];
}
-
+
dummy[4]='\0';
-
+
yr=atoi(dummy);
-
+
dummy[0]=basistime[4];
dummy[1]=basistime[5];
dummy[2]='\0';
-
+
mon=atoi(dummy);
-
+
dummy[0]=basistime[6];
dummy[1]=basistime[7];
dummy[2]='\0';
-
+
day=atoi(dummy);
-
+
dummy[0]=basistime[8];
dummy[1]=basistime[9];
dummy[2]='0';
dummy[3]='0';
dummy[4]='\0';
-
+
hrmin=atoi(dummy);
grib_lbl[11]=yr;
grib_lbl[12]=mon;
grib_lbl[13]=day;
grib_lbl[14]=hrmin;
-
+
sprintf(basetime,"%4d-%02d-%02d %c%c:00:00",yr,mon,day,basistime[8],basistime[9]);
-
-
+
+
status = yearsec_ansi_to_timet(basetime, &basetime_t);
/*************************************************************/
if (debugflag>0)
printf("\n DEBUG: Determined basis time = %s basis time_t = %ld sec \n",basetime,basetime_t);
/*************************************************************/
-
+
if (status != 0 || basetime_t <= 0)
{
printf(" The basis time could not be correctly calculated from the input NetCDF filename.\n" \
" Determined basis time = %s basis time_t = %ld sec \n" \
" Please rename the file according to guidance and try again.\n", basetime, basetime_t);
return FILEERR;
- }
+ }
}
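/*
* Worked example (illustrative): -b 2011100112 is split above into
*     yr = 2011, mon = 10, day = 1, hrmin = 1200
* giving basetime = "2011-10-01 12:00:00", which yearsec_ansi_to_timet()
* converts to the basetime_t seconds used for the forecast-hour arithmetic.
*/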
-
-
-/************************************************************************************************************
- /* main loop to go through each forecast data set and grib up the data */
-
+
+
+/************************************************************************************************************
+ /* main loop to go through each forecast data set and grib up the data */
+
/* note that had the loop set up for multiple valid times first. Then thought ABRFC way of 1 file per forecast was
- simpler. However, that didn't work for other RFC operations, so went back to multiple forecast hours within 1 NetCDF
- file.
+ simpler. However, that didn't work for other RFC operations, so went back to multiple forecast hours within 1 NetCDF
+ file.
*/
-
+
if (time1flag>0) /* for testing only to do just the first valid time from the netcdf file */
vt_len=2;
/****************************************************************************/
if (debugflag>0)
printf("\n ***Entering main loop to process NetCDF records(s) into GRIB files*** \n\n");
-/****************************************************************************/
+/****************************************************************************/
- for (m=0; m0)
" Exiting...\n");
return UNERR;
}
-
-
+
+
/* this is an "estimate" product rather than a forecast that doesn't need basis time */
-
+
/* As this is an estimate product, it is valid at the end time retrieved from the NetCDF file rather than
determined from a base time. First, though, get the reference time which is the first valid time for this
grid. Will reuse basistime variable here for this purpose.
*/
+
+ status = timet_to_yearsec_ansi((time_t) *(validTimes+m), basistime);
- status = timet_to_yearsec_ansi((time_t) *(validTimes+m), basistime);
-
-
+
for (i=0;i<4;i++)
{
dummy[i]=basistime[i];
}
dummy[4]='\0';
-
+
grib_lbl[11]=atoi(dummy);
-
+
dummy[0]=basistime[5];
dummy[1]=basistime[6];
dummy[2]='\0';
-
+
grib_lbl[12]=atoi(dummy);
-
+
dummy[0]=basistime[8];
dummy[1]=basistime[9];
dummy[2]='\0';
-
+
grib_lbl[13]=atoi(dummy);
-
+
dummy[0]=basistime[11];
dummy[1]=basistime[12];
dummy[2]=basistime[14];
dummy[3]=basistime[15];
dummy[4]='\0';
-
+
grib_lbl[14]=atoi(dummy);
-
+
fcsth=0;
-
+
/* In the case of multiple accumulation periods in the same netcdf file, will need to attach this to the
- filename in both cases. Can't reuse fcsth as it might be needed to determine the WMO header for any
+ filename in both cases. Can't reuse fcsth as it might be needed to determine the WMO header for any
future NPVU estimate/observed grids.
- */
-
-
- esth=(int) ((*(validTimes+m+1)) - (*(validTimes+m)))/ SECINHR;
-
-/*************************************************************/
+ */
+
+
+ esth=(int) ((*(validTimes+m+1)) - (*(validTimes+m)))/ SECINHR;
+
+/*************************************************************/
if (debugflag>0)
printf(" DEBUG: esth = %d valid time = %ld initial time = %ld\n",esth, (*(validTimes+m+1)), (*(validTimes+m)));
-/*************************************************************/
+/*************************************************************/
-
-
- if (esth > 240 || esth < 0)
+
+
+ if (esth > num_hours || esth < 0)
{
- printf(" The estimated/observed time period is either less than 0 or greater than 10 days (240 hours).\n" \
+ printf(" The estimated/observed time period is either less than 0 or greater than %d hours.\n" \
" Therefore, valid times within the input NetCDF filename may not have been generated \n" \
" correctly. Or this is actually a forecast grid and the -b option should be used so it \n" \
" will be processed correctly. Check your options and ensure this is an estimate or observed grid\n" \
" You could also try to generate the file again.\n" \
- " For debug esth = %d\n",esth);
+ " For debug esth = %d\n",num_hours, esth);
return FILEERR;
}
-
-
+
+
/* see the GRIB table on this for determining reference and valid times for different types of products */
if (gfe2grib.timerange==3 || gfe2grib.timerange==4)
{
/* average or accumulation */
 /* This will be the time determined from grib_lbl 11-14 to that date/time adding the number of hours in esth */
-
+
grib_lbl[16]=0; /* P1 */
grib_lbl[17]=esth; /* P2 */
}
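/*
* Worked example (illustrative): a 6-hour QPE record whose validTimes pair
* is 12Z and 18Z gets its reference time set to 12Z above, and with
* timerange 3/4 (average/accumulation)
*     P1 = grib_lbl[16] = 0
*     P2 = grib_lbl[17] = esth = 6
* so the GRIB period runs from the reference time to reference + 6 h.
*/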
@@ -1683,77 +1711,77 @@ if (debugflag>0)
this to determine the estimate/observed grid valid time as the reference time is the first valid
time in the sequence for the grid.
*/
-
+
grib_lbl[16]=esth; /* P1 */
grib_lbl[17]=0; /* P2 */
-
+
}
else
{
printf(" Unknown time range. Check the gfe2grib.txt file \n");
return UNERR;
}
-
-
- printf("\n\n NetCDF record %d is an estimate/observed product\n", m/2+1);
+
+
+ printf("\n\n NetCDF record %d is an estimate/observed product\n", m/2+1);
/*************************************************************/
if (debugflag>0)
/* this is an estimate/observed product */
printf(" DEBUG: valid time = %d %d %d %d validtime=%s\n" \
- " DEBUG: validTimes = %ld\n", yr, mon, day, hrmin, validtime, *(validTimes+m+1));
+ " DEBUG: validTimes = %ld\n", yr, mon, day, hrmin, validtime, *(validTimes+m+1));
/*************************************************************/
-
+
}
else
{
- printf("\n\n NetCDF record %d is a forecast or estimate product needing basis time\n", m/2+1);
+ printf("\n\n NetCDF record %d is a forecast or estimate product needing basis time\n", m/2+1);
fcsth = (int) ((*(validTimes+m+1)) - basetime_t);
-
+
timediff= (*(validTimes+m+1)) - (*(validTimes+m));
-
+
timedif_hr = (int) timediff/SECINHR;
fcsth /= SECINHR;
-
-/*************************************************************/
+
+/*************************************************************/
if (debugflag>0)
printf(" DEBUG: fcsth = %d timediff=%f valid time = %ld basis time_t = %ld\n",fcsth, timediff,(*(validTimes+m+1)), basetime_t);
-/*************************************************************/
-
- if (fcsth > 240 || fcsth < 0)
+/*************************************************************/
+
+ if (fcsth > num_hours || fcsth < 0)
{
- printf(" The forecast time is either less than 0 or greater than 10 days (240 hours).\n" \
+ printf(" The forecast time is either less than 0 or greater than %d hours.\n" \
" Therefore, the basis time may not be specified correctly or may need to be specified \n" \
" on the command line according to guidance. Please check your command options or \n" \
" or the NetCDF file creation and try again.\n" \
- " for debug fcsth = %d\n",fcsth);
+ " for debug fcsth = %d\n",num_hours, fcsth);
return FILEERR;
}
-
-/*************************************************************************/
+
+/*************************************************************************/
if (debugflag >0)
{
-
+
/* debug only */
-
+
/*printf(" base time 1=%ld 2=%ld diff=%f\n",*(validTimes+m),*(validTimes+m+1),timediff);*/
-
- printf(" DEBUG: reference time = %d%02d%02d%02d \n",yr,mon,day,hrmin);
-
-
-
+
+ printf(" DEBUG: reference time = %d%02d%02d%02d \n",yr,mon,day,hrmin);
+
+
+
}
-/*************************************************************************/
-
+/*************************************************************************/
+
if (gfe2grib.timerange==3 || gfe2grib.timerange==4)
{
/* average or accumulation */
-
+
grib_lbl[16]=fcsth-(int)(timediff/SECINHR); /* P1 */
grib_lbl[17]=fcsth; /* P2 */
}
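/*
* Worked example (illustrative): a 6-hour QPF grid valid at 18Z from a 00Z
* basis time gives fcsth = 18 and timediff/SECINHR = 6, so for an
* accumulation (timerange 3/4)
*     P1 = grib_lbl[16] = 18 - 6 = 12
*     P2 = grib_lbl[17] = 18
* i.e. the accumulation covers basis + 12 h through basis + 18 h.
*/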
@@ -1762,10 +1790,10 @@ if (debugflag >0)
/* this is for a forecast product valid at reference time + P1 and
at present using this for PETF
*/
-
+
grib_lbl[16]=fcsth; /* P1 */
grib_lbl[17]=0; /* P2 */
-
+
}
else
{
@@ -1775,139 +1803,139 @@ if (debugflag >0)
}
-
+
/* Get data for this time record */
-
+
if(cdfvar_ndims==3)
{
-
+
start[0]=(long) (m/2);
-
+
status = ncvarget(cdfid,cdfvar_id,start,count,cdfvargrid);
}
else if (cdfvar_ndims==2)
{
start1r[0]=(long) (m/2);
-
+
status = ncvarget(cdfid,cdfvar_id,start1r,count1r,cdfvargrid);
}
-
+
if (status != NC_NOERR)
{
printf(" An error occurred while getting the cdfvar array\n");
return CDFERR;
}
-
+
/* all missing check */
-
+
 for (i=0;i<arraysize;i++)
 {
 if(*(cdfvargrid+i) > xmissing)
{
mischek=1;
break;
}
}
-
+
if(mischek==0)
{
printf(" All data retrieved from the NetCDF file was missing. Exiting program...\n");
return MISCHECK;
}
-
+
/* all data zero check. since already checked for all missing, can see if all data not equal to 0 */
-
+
 for (i=0;i<arraysize;i++)
 {
 if(*(cdfvargrid+i) != 0.)
 {
 zerochek=1;
 break;
 }
 }

 if(zerochek==0 && debugflag > 0)
{
printf(" DEBUG WARNING: All data retrieved from the NetCDF file was zero. \n" \
"This may be normal in the case of QPF\n");
-
+
}
-
+
 /* depending on the parameter, convert to GRIB units standards */
-
+
if (grib_lbl[7]==61) /* precipitation */
{
-
+
if(strstr(cdfunits,"in")!=NULL)
{
-
+
 for (i=0;i<arraysize;i++)
 {
 if(*(cdfvargrid+i) > xmissing)
-
+
*(cdfvargrid+i) *= 25.4; /* convert inches to mm */
-
+
}
- }
-
- }
+ }
+
+ }
else if (grib_lbl[7]==11) /* temperature */
{
-
+
if(strstr(cdfunits,"F")!=NULL)
{
-
+
 for (i=0;i<arraysize;i++)
 {
 if(*(cdfvargrid+i) > xmissing)
-
+
*(cdfvargrid+i) = ((*(cdfvargrid+i)-32) * 5/9) + 273.16; /* convert F to K */
-
+
}
-
+
}
else if (strstr(cdfunits,"C")!=NULL)
{
 for (i=0;i<arraysize;i++)
 {
 if(*(cdfvargrid+i) > xmissing)
-
+
*(cdfvargrid+i) += 273.16; /* convert C to K */
-
+
}
- }
+ }
}
-
+
else if (grib_lbl[7]==57) /* evaporation */
{
/* no code yet */
-
+
}
else if (grib_lbl[7]==7) /* height */
- {
+ {
/* this section is for freezing level */
-
+
if(strstr(cdfunits,"ft")!=NULL)
{
-
+
 for (i=0;i<arraysize;i++)
 {
 if(*(cdfvargrid+i) > xmissing)
-
+
*(cdfvargrid+i) *= 0.3048; /* convert feet to meters */
-
+
}
- }
- }
+ }
+ }
else
{
printf(" Unknown parameter found in nc2grib...Exiting\n");
return UNERR;
}
-
-/*************************************************************************/
+
+/*************************************************************************/
if (debugflag >0)
{
printf("\n DEBUG: GRIB message information follows:\n");
@@ -1921,17 +1949,21 @@ if (debugflag >0)
j+=7;
}
}
-}
-/*************************************************************************/
-
-
- status = packgrib(grib_lbl,pds_ext,&iplen,cdfvargrid,&idim,&xmissing,
- output_buffer,&odim,&length);
+}
+/*************************************************************************/
+ /* I'm hoping this is where the "meat" of the conversion is taking place.
+ * I'm also hoping that everything else above is simply parameter checking.
+ * djsiii 13 Sep 2012
+ */
+
+ status = packgrib(grib_lbl,pds_ext,&iplen,cdfvargrid,&idim,&xmissing,
+ output_buffer,&odim,&length);
+
if (status !=0)
- {
- printf(" DEBUG: The routine which packs the grid into GRIB, packgrib, returned with errors status = %d\n",status);
- return SUBERR;
+ {
+ printf(" DEBUG: The routine which packs the grid into GRIB, packgrib, returned with errors status = %d\n",status);
+ return SUBERR;
}
else
{
@@ -1940,15 +1972,15 @@ if (debugflag >0)
else
printf("\n Gribbing of data successful for record %ld\n",start1r[0]+1);
}
-
+
/* create the GRIB1 output filename based input options */
-
-
+
+
/***************************************************************************/
if(debugflag)
printf("\n DEBUG: Creating output file name \n");
-/***************************************************************************/
+/***************************************************************************/
if(ofn==NULL)
{
@@ -1956,37 +1988,37 @@ if(debugflag)
if(debugflag)
printf("\n DEBUG: Output filename not specified...building from input filename \n");
/**************************************************************************/
-
+
if(strstr(infn,".cdf")!=NULL || strstr(infn,".nc") !=NULL)
{
valptr=strstr(infn,".cdf");
if (valptr==NULL)
valptr=strstr(infn,".nc");
-
+
psn=valptr-infn;
-
+
ofn=(char *) malloc(sizeof(char) * (psn+1));
-
+
if (ofn==NULL)
{
-
+
printf(" ERROR: Something went wrong with memory allocation for the GRIB filename....exiting\n");
return MALERR;
}
-
-
+
+
strncpy(ofn,infn,psn);
-
+
*(ofn+psn)='\0';
-
- }
+
+ }
else
{
-
+
ofn=(char *) malloc(sizeof(char)*(strlen(infn)+1));
if (ofn==NULL)
{
-
+
printf(" ERROR: Something went wrong with memory allocation for the GRIB filename...exiting\n");
return MALERR;
}
@@ -1994,55 +2026,55 @@ if(debugflag)
strcpy(ofn,infn);
}
-
+
if(ofntemp!=NULL)
{
free(ofntemp);
ofntemp=NULL;
}
-
+
ofntemp=(char *) malloc(sizeof(char) * (strlen(ofn)+1));
if (ofntemp==NULL)
{
-
+
printf(" ERROR: Something went wrong with memory allocation for the temp output filename...exiting\n");
return MALERR;
}
- if(ofn!=NULL)
+ if(ofn!=NULL)
strcpy(ofntemp,ofn); /* must do this so ofntemp isn't NULL in the comparison below. Might not make a
- difference but better safe than sorry for coding purposes
+ difference but better safe than sorry for coding purposes
*/
else
{
printf("\n ERROR occurred as out filename is NULL and shouldn't be before copying to ofntemp variable \n");
return UNERR;
- }
-
+ }
+
}
-
- /* DTM - 08/18/09. An excellent suggestion from OHRFC (Mark Fenbers) is to use the function strftime and the
+
+ /* DTM - 08/18/09. An excellent suggestion from OHRFC (Mark Fenbers) is to use the function strftime and the
automatic assignment of date/time strings within the input filename. Took a bit of doing but will use modified
- versions of our on Time Util library routines to do this and remove the previous way of doing business
- substituting date/time strings in the filenames. This will move this executable to version 4.1. Need to also
+ versions of our own Time Util library routines to do this and remove the previous way of doing business
+ substituting date/time strings in the filenames. This will move this executable to version 4.1. Need to also
 generalize the valid time for NPVU rather than search on the "QPE" string as is done in this present fix.
-
+
If the -f flag is specified, then valid time will be used instead of basetime.
-
- */
-
+
+ */
+
fflagcntr=0; /* initialize counter */
-
+
if(strrchr(ofntemp,'%') != NULL) /* this will indicate that a date format will be substituted */
{
-
+
for (i=0;i0) /* we only need to check this if basis time is used
+
+ if(strstr(ofntemp,"%%") == NULL && bflag>0) /* we only need to check this if basis time is used
in the output filename format */
{
fflagcntr = 0;
-
+
if(datewarn<=0)
{
datewarn++; /* only want to print this warning message for the first record in the NetCDF file */
-
+
printf("\n WARNING: Basis time option was found but the formatted time interval and/or hours from\n" \
" basetime is missing from the input format string. These should have the format of %%0d \n" \
- " or %%d in the input format string. This should be included so as not overwrite files. \n\n");
-
-
+ " or %%d in the input format string. This should be included so as not overwrite files. \n\n");
+
+
printf("\n A default date,time will be used instead.\n" \
" If you intended on a custom one, please check the pattern for the missing\n" \
" time interval/hours past basis time pattern in your command line and try again.\n\n");
@@ -2089,25 +2121,25 @@ if(debugflag)
{
printf("\n Please see WARNING message from first GRIB record generated \n" \
" concerning missing custom date/time wildcards in output GRIB filename \n\n");
- }
+ }
}
- else
+ else
{
if(bflag) /* basis time is included, use the number of hours
past basis time unless other conditions occur with qflag
*/
{
if (qflag)
-/* sprintf(ofn,ofn,timedif_hr);
+/* sprintf(ofn,ofn,timedif_hr);
else if (qflag) */ /* due to filename limitations with base time in the filename,
will need both fcst and timedif_hr so filenames are not the same
- for an estimate product that is the same number of hours from
- base time but different time interval. This can occur in the
- precipitation QPE file from daily QC which has a 24 hour 6-hour
+ for an estimate product that is the same number of hours from
+ base time but different time interval. This can occur in the
+ precipitation QPE file from daily QC which has a 24 hour 6-hour
product and a 24 hour 24-hour product. So we need to count the times
that %% occur together as this will indicate where to put each
*/
- {
+ {
perflag=0;
for (i=0; i=2)
{
if(!rflag) /* normally put time interval difference before hours past basis */
-
- sprintf(ofn,ofn,timedif_hr,fcsth);
+
+ sprintf(ofn,ofn,timedif_hr,fcsth);
else
sprintf(ofn,ofn,fcsth,timedif_hr); /* but reverse the order here */
- }
+ }
else
{
if(qpewarn==0)
@@ -2136,55 +2168,55 @@ if(debugflag)
" a 6-hour QPE and a 24-hour QPE. \n" \
" Please check to ensure you formatted your output string accordingly. \n\n");
qpewarn++;
- }
+ }
sprintf(ofn,ofn,fcsth); /* user is taking responsibility to ensure estimate using basis time
doesn't include multiple time intervals in NetCDF
*/
}
-
- }
+
+ }
else
-
+
sprintf(ofn,ofn,fcsth); /* standard forecast product using forecast hours past basis time */
-
+
}
- else /* without a basis time, this has to be an estimated/observed product using the valid time in
- the output file. Note that if "%%" is NULL and bflag == 0, specifying esth here is
+ else /* without a basis time, this has to be an estimated/observed product using the valid time in
+ the output file. Note that if "%%" is NULL and bflag == 0, specifying esth here is
ignored in the output filename.
*/
-
+
sprintf(ofn,ofn,esth);
-
-
-
+
+
+
if(strstr(ofn,".grb")!=NULL)
sprintf(outfn,"%s/%s",gribdir,ofn);
-
+
else
- sprintf(outfn,"%s/%s.grb",gribdir,ofn);
+ sprintf(outfn,"%s/%s.grb",gribdir,ofn);
}
-
- }
-
+
+ }
+
if(oflag==0 || fflagcntr == 0)
{
-
+
if(strstr(ofn,".grb")!=NULL)
{
valptr=strstr(ofn,".grb");
psn=valptr-ofn;
-
+
strncpy(dummy,ofn,psn);
-
+
dummy[psn]='\0';
-
+
if (ofn!=NULL)
{
free(ofn);
ofn=NULL;
}
-
+
ofn=(char *) malloc(sizeof(char)*(strlen(dummy)+1));
if(ofn==NULL)
{
@@ -2192,34 +2224,34 @@ if(debugflag)
" before the default filename was determined...exiting\n");
return MALERR;
}
+
-
- strcpy(ofn,dummy);
-
+ strcpy(ofn,dummy);
+
}
-
+
if(bflag) /* default filenames if output filename and/or format not specified */
{
if(qflag)
-
+
sprintf(outfn,"%s/%s_%4d%02d%02d%02dh%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,timedif_hr);
-
+
else
-
+
sprintf(outfn,"%s/%s_%4d%02d%02d%02df%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,fcsth);
- }
+ }
else
- sprintf(outfn,"%s/%s_%4d%02d%02d%02df%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,esth);
-
+ sprintf(outfn,"%s/%s_%4d%02d%02d%02df%03d.grb",gribdir,ofn,yr,mon,day,hrmin/100,esth);
+
}
-
+
fptr = fopen ( outfn, "w" ); /* open the output GRIB file */
-
-
+
+
if ( fptr == NULL ) {
printf ( " ERROR: output GRIB file could not be opened.\n" );
return OPENERR;
@@ -2229,10 +2261,10 @@ if(debugflag)
printf(" Writing grib data to file %s...\n",outfn);
}
- /* write out the GRIB data to the output buffer */
+ /* write out the GRIB data to the output buffer */
status = fwrite ( (unsigned char *)output_buffer, sizeof(unsigned char), length, fptr );
-
+
if (status == 0 || length < 100)
printf("\n WARNING: Possible problem writing grib file, number of elements written = %d\n",length);
else if ( length == 0)
@@ -2242,102 +2274,99 @@ if(debugflag)
}
else
printf("\n GRIB file written %s number of elements = %d\n\n",outfn,status);
+
-
- fclose(fptr);
-
+ fclose(fptr);
+
/* If this is precip (APCP) and headerflag is on, write out to a file for NPVU. */
if (headflag >0 && grib_lbl[7]==61)
{
-
-
+
+
/* get current GMT date and time for header */
-
+
time( &curtime);
-
+
curgmtime = gmtime (&curtime);
-
+
sprintf(adayhrmin,"%02d%02d%02d",curgmtime->tm_mday,curgmtime->tm_hour,curgmtime->tm_min);
-/********************************************************************/
+/********************************************************************/
if(debugflag>0)
{
- printf("\n DEBUG: current day hour min GMT = %s\n",adayhrmin);
-}
-/********************************************************************/
-
-
- if(nc_getAppsDefaults("pproc_bin",pprocbin) == -1)
+ printf("\n DEBUG: current day hour min GMT = %s\n",adayhrmin);
+}
+/********************************************************************/
+
+
+ if(getAppsDefaults("pproc_bin",pprocbin) == -1)
{
printf(" ERROR: Invalid token value for token \"pproc_bin\".\n\t Program exit.");
return APSDEFERR;
}
-
-
- /* fortran routine copygb_main_ */
+
+
+
sprintf(file_path,"%s/copygb.LX",pprocbin);
-
+
sprintf(tmpNPVUfn,"%s/%s",gribdir,"tmpNPVU.grb");
-
-
-
+
+
+
if(bflag && qflag==0) /* old - strstr(process,"QPE")==NULL && strstr(process,"qpe")==NULL) */
{
if(debugflag>0)
-
+
/* the -X here causes copygb to print out expanded information about its operation */
-
-/* sprintf(command,"%s -xg218 -X %s %s",file_path, outfn, tmpNPVUfn); */
- sprintf(command,"-xg218 -X %s %s", outfn, tmpNPVUfn);
+
+ sprintf(command,"%s -xg218 -X %s %s",file_path, outfn, tmpNPVUfn);
else
-/* sprintf(command,"%s -xg218 %s %s",file_path, outfn, tmpNPVUfn); */
- sprintf(command,"-xg218 %s %s", outfn, tmpNPVUfn);
-
-
- /* first write out the main GRIB file using the copygb command without the header determined above
- to a temporary holding file. This file will now contain the QPF forecast on GRID218 at 10km
- resolution */
- copygb_main_(command);
- /* status = system(command); */
+ sprintf(command,"%s -xg218 %s %s",file_path, outfn, tmpNPVUfn);
+
+ /* first write out the main GRIB file using the copygb command without the header determined above
+ to a temporary holding file. This file will now contain the QPF forecast on GRID218 at 10km
+ resolution */
+
+ status = system(command);
}
else
{
/* for a QPE grid, keep at the HRAP grid resolution and don't copy to the 218 GRID */
-
+
sprintf(command,"cp %s %s",outfn, tmpNPVUfn);
-
+
status = system(command);
}
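/*
* Illustrative command strings built above (the paths are placeholders):
*
*   forecast QPF (bflag set, qflag clear):
*     <pproc_bin>/copygb.LX -xg218 [-X] <outfn> <gribdir>/tmpNPVU.grb
*   QPE (all other cases):
*     cp <outfn> <gribdir>/tmpNPVU.grb
*
* i.e. forecasts are regridded to AWIPS grid 218 while QPE stays on its
* native HRAP grid; -X (extra copygb diagnostics) is added only when the
* debug flag is on.
*/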
-/********************************************************************/
+/********************************************************************/
if(debugflag>0)
{
- printf(" DEBUG: command for temp NPVU grib file=%s \n DEBUG: status of command execution=%d\n",command,status);
-}
-/********************************************************************/
+ printf(" DEBUG: command for temp NPVU grib file=%s \n DEBUG: status of command execution=%d\n",command,status);
+}
+/********************************************************************/
/* create an appropriate filename for the NPVU file */
-
+
if(strstr(outfn,".grb")!=NULL)
{
valptr=strstr(outfn,".grb");
psn=valptr-outfn;
-
+
strncpy(outfnqpf,outfn,psn);
-
+
outfnqpf[psn]='\0';
}
else
strcpy(outfnqpf,outfn);
+
-
-
+
sprintf(outfnqpf,"%s_NPVU.grb",outfnqpf);
-
+
fptrqpf = fopen ( outfnqpf, "wb" );
-
-
+
+
if ( fptrqpf == NULL ) {
printf ( " ERROR: NPVU GRIB file could not be opened.\n" );
return OPENERR;
@@ -2347,16 +2376,16 @@ if(debugflag>0)
printf(" Writing NPVU QPF WMO header info to file %s...\n",outfnqpf);
}
-
+
/* apply appropriate header based on estimate or forecast and number of hours */
-
+
if (fcsth==0)
strcpy(wmohdr1,"ZETA98");
else if (strstr(process,"QPE")!=NULL || strstr(process,"qpe")!=NULL)
strcpy(wmohdr1,"ZETA98");
else
{
-
+
if (fcsth == 6)
strcpy(wmohdr1,"YEIG98");
else if (fcsth == 12)
@@ -2387,12 +2416,12 @@ if(debugflag>0)
" to indicate forecast hour is in the GRIB Product Definition Section\n");
strcpy(wmohdr1,"YEIZ98");
}
- }
-
+ }
+
/* get current GMT date and time for header */
-
+
j=0;
-
+
for (i=0;i0)
}
header[j]=aspace;
j++;
-
+
for (i=0;i0)
}
header[j]=aspace;
j++;
-
+
for (i=0;i0)
{
/* printf("\n j=%d\n",j);*/
for (i=0; i0)
printf(" ERROR: Could not create output buffer for %s...\n",outfnqpf);
return FILEOPERR;
}
-
+
if (setvbuf(tmpfptr,NULL,_IOFBF,BUFFSIZE) !=0)
{
printf(" ERROR: Could not create output buffer for temp NPVU grib file...\n");
return FILEOPERR;
}
-
+
long countr;
char ch;
/* must find the beginning of the GRIB message indicate by "GRIB" */
-
+
while ((ch = getc(tmpfptr)) != EOF)
{
- /* later testing revealed that there was a difference between the temp QPE
+ /* later testing revealed that there was a difference between the temp QPE
and converted QPF GRIB files on the 218 grid where extra characters
- were in the beginning of the QPF GRIB message but not in the QPE
+ were in the beginning of the QPF GRIB message but not in the QPE
which started with "G". Therefore, the first getc above moves the
file position pointer by one and therefore it never sees the "G" in the
QPE file. So while the header was copied, there was no "GRIB" found and none
in the NPVU QPE file. Therefore if this is the first time we get
- a character from the file, reposition it to the beginning and
+ a character from the file, reposition it to the beginning and
grab that character again. Finds "G" for the QPE files now correctly.
*/
-
+
if(firstch==0)
{
fseek(tmpfptr,0L,SEEK_SET);
-
+
firstch=1;
- }
-
+ }
+
countr=ftell(tmpfptr);
ch = getc(tmpfptr);
/* printf(" ch=%c countr=%ld \n",ch, countr); */
if (ch == 'G')
{
ch = getc(tmpfptr);
-
+
if (ch == 'R')
{
-
+
ch = getc(tmpfptr);
if (ch == 'I')
{
-
+
ch = getc(tmpfptr);
if (ch == 'B')
break;
}
}
}
-
-
-
+
+
+
fseek(tmpfptr,countr,SEEK_SET);
}
-
- firstch=0; /* reset the flag for next file */
-
+
+ firstch=0; /* reset the flag for next file */
+
fseek(tmpfptr,countr,SEEK_SET);
-
+
while ((odim = fread (temp,sizeof(unsigned char),BUFFSIZE,tmpfptr)) > 0 )
{
fwrite(temp,sizeof(unsigned char),odim,fptrqpf);
}
-
+
if (ferror(tmpfptr) !=0 )
{
printf(" Error reading temp NPVU grib file \n");
return FILEOPERR;
}
-
+
if (ferror(fptrqpf) !=0 )
{
printf(" Error writing to NPVU grib file %s \n",outfnqpf);
@@ -2560,38 +2589,38 @@ if (debugflag >0)
else
{
printf(" Writing to NPVU grib file appears to be successful\n");
- }
-
-
+ }
+
+
if (tmpfptr != NULL)
- fclose(tmpfptr);
-
+ fclose(tmpfptr);
+
if (fptrqpf !=NULL)
fclose(fptrqpf);
-
+
sprintf(command,"rm -f %s",tmpNPVUfn); /* remove the temporary NPVU file */
-
+
system(command);
-
- }
-
+
+ }
+
/* ensure these are the correct sign for the next data set */
-
+
grib_lbl[32]=firstLon;
-
+
if(grib_lbl[25]==32) /* polar stereo */
- grib_lbl[34]=lonOrigin*1000;
+ grib_lbl[34]=lonOrigin*1000;
else if(grib_lbl[25]==42) /* lambert conformal */
grib_lbl[34]=(*latLonOrigin)*1000;
else
{
printf(" Map projection number %d not supported at this time...Exiting\n",grib_lbl[25]);
return UNERR;
- }
- odim=COPYSIZE; /* reinitialize for copygb */
-
- grib_lbl[24]=16; /* reinitialize for next data set in the NetCDF file */
-
+ }
+ odim=COPYSIZE; /* reinitialize for copygb */
+
+ grib_lbl[24]=16; /* reinitialize for next data set in the NetCDF file */
+
if(oflag==0)
{
if(ofn!=NULL)
@@ -2604,8 +2633,8 @@ if (debugflag >0)
free(ofntemp);
ofntemp=NULL;
}
- }
-
+ }
+
if(onegribflag)
{
gfiles[numgfiles]= (char *) malloc (strlen(outfn)+1);
@@ -2620,21 +2649,21 @@ if (debugflag >0)
strcpy(gfiles[numgfiles],outfn);
numgfiles++;
}
- }
-
-
+ }
+
+
}
-
+
ncclose(cdfid);
-
+
/* if user desires only 1 GRIB file, must combine all into one */
-
+
if (onegribwarn==0 && onegribflag>0 && numgfiles>=2)
{
if(gfiles[0]!=NULL && gfiles[1]!=NULL)
{
sprintf(onegrib,"%s/%s",gribdir,onegfname);
-
+
onegfptr = fopen ( onegrib, "wb");
if ( onegfptr == NULL )
{
@@ -2650,18 +2679,18 @@ if (debugflag >0)
printf(" ERROR: Could not create output buffer for combined GRIB file %s...\n",onegfname);
return FILEOPERR;
}
-
-
+
+
for (i=0;i0)
}
if (fptr != NULL)
fclose(fptr);
-
+
sprintf(command,"rm -f %s",gfiles[i]); /* remove the grib file */
status=system(command);
-
+
}
printf("\n Successfully combined individual GRIB files into this file:\n %s\n",onegrib);
-
+
}
else if (onegribflag>0)
{
printf("\n While attempting to combine files, there was a problem accessing the first two GRIB filenames.\n" \
" Therefore cannot combine GRIB files into one as desired\n");
- }
-
+ }
+
}
else if (onegribflag>0)
printf("\n There was a problem while attempting to combine the GRIB files into one. \n" \
" If number of GRIB files below equals to 1, won't be done. \n" \
" For DEBUG purposes, GRIB warn = %d and number of GRIB files = %d\n",onegribwarn,numgfiles);
+
+/* clean up */
-/* clean up */
-
- printf("\n nc2grib has completed processing for this run.\n");
+ printf("\n nc2grib has completed processing for this run.\n");
if(onegribflag>0)
-
+
if(onegfptr!=NULL)
fclose(onegfptr);
-
+
for (i=0;i0)
free(domainOrigin);
if(domainExtent!=NULL)
free(domainExtent);
-
+
return 0;
}
int basetime_ansi_to_timet(char *ansi, time_t *timet)
{
-
+
struct tm gm_struct;
int rv = 0,
scan_rv = 0;
-
-
-
+
+
+
memset(&gm_struct,0,sizeof(struct tm));
scan_rv = sscanf(ansi, "%4d%2d%2d%2d",
&(gm_struct.tm_year),
&(gm_struct.tm_mon),
&(gm_struct.tm_mday),
&(gm_struct.tm_hour));
-
+
gm_struct.tm_year = gm_struct.tm_year - 1900;
gm_struct.tm_mon = gm_struct.tm_mon - 1;
gm_struct.tm_min = 0;
@@ -2787,7 +2816,7 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char* userformat)
an ANSI time string representation.
*/
-
+
struct tm *gm_struct;
size_t rv = 0;
@@ -2802,7 +2831,7 @@ int timet_to_userformat_ansi(time_t timet, char *ansi, char* userformat)
strcpy(ansi,"");
rv = strftime(ansi, FILE_LEN, userformat, gm_struct);
-
+
return((int) rv);
}
@@ -2873,13 +2902,13 @@ int display_usage(void)
" the -t option above, it MUST be specified for nc2grib to run. \n" \
" netcdf_dir - contains default location of the generated NetCDF files. If not overridden by the \n" \
" -n option above, it MUST be specified for nc2grib to run.\n\n" );
-
+
return 0;
/* ============== Statements containing RCS keywords: */
{static char rcs_id1[] = "$Source: /fs/hseb/ob9d/ohd/pproc/src/nc2grib/RCS/main_nc2grib.c,v $";
- static char rcs_id2[] = "$Id: main_nc2grib.c,v 1.2 2010/06/14 15:04:32 millerd Exp $";}
+ static char rcs_id2[] = "$Id: main_nc2grib.c,v 1.3 2011/10/26 14:49:35 pst Exp $";}
/* =================================================== */
-}
+}
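
The comment in the hunk above describes scanning copygb's temporary output for the "GRIB" indicator bytes so that only the GRIB message follows the WMO header, and rewinding so a message whose very first byte is 'G' is not missed. Below is a minimal, illustrative Java sketch of that indicator scan; it is not part of the patch, and the class and method names are invented.

    // Illustrative sketch only -- not part of the nc2grib patch above.
    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class GribIndicatorScan {

        /**
         * Scans from the start of the file and returns the byte offset of the
         * first "GRIB" indicator, or -1 if no indicator is found.
         */
        static long findGribStart(RandomAccessFile f) throws IOException {
            final byte[] magic = { 'G', 'R', 'I', 'B' };
            int matched = 0;
            long start = -1;
            int b;
            f.seek(0); // begin at byte 0 so a message starting with 'G' is seen
            while ((b = f.read()) != -1) {
                if (b == magic[matched]) {
                    if (matched == 0) {
                        start = f.getFilePointer() - 1;
                    }
                    matched++;
                    if (matched == magic.length) {
                        return start;
                    }
                } else if (b == 'G') {
                    // restart the match on this 'G'
                    matched = 1;
                    start = f.getFilePointer() - 1;
                } else {
                    matched = 0;
                    start = -1;
                }
            }
            return -1;
        }
    }

A caller would then seek to the returned offset and copy the remaining bytes behind the WMO header, which is what the C code does with fseek/fread/fwrite.
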
diff --git a/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml b/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml
index c290defa19..8bc9a66b85 100644
--- a/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml
+++ b/ncep/com.raytheon.uf.edex.ncep.feature/feature.xml
@@ -500,5 +500,18 @@
version="0.0.0"
unpack="false"/>
+
+
+
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.classpath b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.classpath
new file mode 100644
index 0000000000..ad32c83a78
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.classpath
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.project b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.project
new file mode 100644
index 0000000000..1737fb2fde
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>gov.noaa.nws.ncep.common.dataplugin.gempak</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.pde.ManifestBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.pde.SchemaBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.pde.PluginNature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.settings/org.eclipse.jdt.core.prefs b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000000..e962b2ab45
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,8 @@
+#Wed Jun 05 10:12:54 EDT 2013
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/META-INF/MANIFEST.MF b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/META-INF/MANIFEST.MF
new file mode 100644
index 0000000000..1b015935fe
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/META-INF/MANIFEST.MF
@@ -0,0 +1,9 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Gempak
+Bundle-SymbolicName: gov.noaa.nws.ncep.common.dataplugin.gempak
+Bundle-Version: 1.0.0.qualifier
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Require-Bundle: com.raytheon.edex.common;bundle-version="1.12.1174",
+ com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174"
+Export-Package: gov.noaa.nws.ncep.common.dataplugin.gempak.request
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/build.properties b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/build.properties
new file mode 100644
index 0000000000..34d2e4d2da
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/build.properties
@@ -0,0 +1,4 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/gov.noaa.nws.ncep.common.dataplugin.gempak.ecl b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/gov.noaa.nws.ncep.common.dataplugin.gempak.ecl
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.java
new file mode 100644
index 0000000000..eb9c92c143
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.java
@@ -0,0 +1,101 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+import com.raytheon.uf.common.serialization.comm.IServerRequest;
+
+@DynamicSerialize
+public class GetGridDataRequest implements IServerRequest {
+
+ @DynamicSerializeElement
+ private String pluginName;
+
+ @DynamicSerializeElement
+ private String modelId;
+
+ @DynamicSerializeElement
+ private String reftime;
+
+ @DynamicSerializeElement
+ private String fcstsec;
+
+ @DynamicSerializeElement
+ private String level1;
+
+ @DynamicSerializeElement
+ private String level2;
+
+ @DynamicSerializeElement
+ private String vcoord;
+
+ @DynamicSerializeElement
+ private String parm;
+
+ public GetGridDataRequest() {
+ }
+
+ public String getPluginName() {
+ return pluginName;
+ }
+
+ public void setPluginName(String pluginName) {
+ this.pluginName = pluginName;
+ }
+
+ public String getModelId() {
+ return modelId;
+ }
+
+ public void setModelId(String modelId) {
+ this.modelId = modelId;
+ }
+
+ public String getReftime() {
+ return reftime;
+ }
+
+ public void setReftime(String reftime) {
+ this.reftime = reftime;
+ }
+
+ public String getFcstsec() {
+ return fcstsec;
+ }
+
+ public void setFcstsec(String fcstsec) {
+ this.fcstsec = fcstsec;
+ }
+
+ public String getLevel1() {
+ return level1;
+ }
+
+ public void setLevel1(String level1) {
+ this.level1 = level1;
+ }
+
+ public String getLevel2() {
+ return level2;
+ }
+
+ public void setLevel2(String level2) {
+ this.level2 = level2;
+ }
+
+ public String getVcoord() {
+ return vcoord;
+ }
+
+ public void setVcoord(String vcoord) {
+ this.vcoord = vcoord;
+ }
+
+ public String getParm() {
+ return parm;
+ }
+
+ public void setParm(String parm) {
+ this.parm = parm;
+ }
+
+}
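
The request classes added to this new gempak plugin are plain @DynamicSerialize beans with no behavior of their own. The sketch below is illustrative only: it assumes the request is sent to EDEX through com.raytheon.uf.common.serialization.comm.RequestRouter, which is not shown in this diff, and every field value is hypothetical.

    // Illustrative sketch only; routing and field values are assumptions.
    import com.raytheon.uf.common.serialization.comm.RequestRouter;

    import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetGridDataRequest;

    public class GetGridDataExample {

        public static Object requestGrid() throws Exception {
            GetGridDataRequest request = new GetGridDataRequest();
            request.setPluginName("grid");           // hypothetical plugin name
            request.setModelId("GFS212");            // hypothetical model id
            request.setReftime("2013-09-03 12:00:00");
            request.setFcstsec("21600");             // assumed forecast-seconds convention
            request.setLevel1("500");
            request.setLevel2("-1");
            request.setVcoord("PRES");
            request.setParm("HGHT");
            // The diff does not define the server-side handler or its return type,
            // so the result is left as Object here.
            return RequestRouter.route(request);
        }
    }
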
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.java
new file mode 100644
index 0000000000..a339b0a690
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.java
@@ -0,0 +1,35 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+import com.raytheon.uf.common.serialization.comm.IServerRequest;
+
+@DynamicSerialize
+public class GetGridInfoRequest implements IServerRequest {
+
+ @DynamicSerializeElement
+ private String pluginName;
+
+ @DynamicSerializeElement
+ private String modelId;
+
+ public GetGridInfoRequest() {
+ }
+
+ public String getPluginName() {
+ return pluginName;
+ }
+
+ public void setPluginName(String pluginName) {
+ this.pluginName = pluginName;
+ }
+
+ public String getModelId() {
+ return modelId;
+ }
+
+ public void setModelId(String modelId) {
+ this.modelId = modelId;
+ }
+
+}
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.java
new file mode 100644
index 0000000000..3c8f8b62da
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.java
@@ -0,0 +1,35 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+import com.raytheon.uf.common.serialization.comm.IServerRequest;
+
+@DynamicSerialize
+public class GetGridNavRequest implements IServerRequest {
+
+ @DynamicSerializeElement
+ private String pluginName;
+
+ @DynamicSerializeElement
+ private String modelId;
+
+ public GetGridNavRequest() {
+ }
+
+ public String getPluginName() {
+ return pluginName;
+ }
+
+ public void setPluginName(String pluginName) {
+ this.pluginName = pluginName;
+ }
+
+ public String getModelId() {
+ return modelId;
+ }
+
+ public void setModelId(String modelId) {
+ this.modelId = modelId;
+ }
+
+}
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.java
new file mode 100644
index 0000000000..fed3c300c2
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.java
@@ -0,0 +1,24 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+import com.raytheon.uf.common.serialization.comm.IServerRequest;
+
+@DynamicSerialize
+public class GetStationsRequest implements IServerRequest {
+
+ @DynamicSerializeElement
+ private String pluginName;
+
+ public GetStationsRequest() {
+ }
+
+ public String getPluginName() {
+ return pluginName;
+ }
+
+ public void setPluginName(String pluginName) {
+ this.pluginName = pluginName;
+ }
+
+}
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.java
new file mode 100644
index 0000000000..1b833e93b7
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.java
@@ -0,0 +1,35 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+import com.raytheon.uf.common.serialization.comm.IServerRequest;
+
+@DynamicSerialize
+public class GetTimesRequest implements IServerRequest {
+
+ @DynamicSerializeElement
+ private String pluginName;
+
+ @DynamicSerializeElement
+ private String timeField;
+
+ public GetTimesRequest() {
+ }
+
+ public String getPluginName() {
+ return pluginName;
+ }
+
+ public void setPluginName(String pluginName) {
+ this.pluginName = pluginName;
+ }
+
+ public String getTimeField() {
+ return timeField;
+ }
+
+ public void setTimeField(String timeField) {
+ this.timeField = timeField;
+ }
+
+}
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.java
new file mode 100644
index 0000000000..88dcf0f319
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.java
@@ -0,0 +1,25 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import java.util.List;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+
+@DynamicSerialize
+public class GetTimesResponse {
+
+ @DynamicSerializeElement
+ private List> times;
+
+ public GetTimesResponse() {
+ }
+
+ public List> getTimes() {
+ return times;
+ }
+
+ public void setTimes(List> times) {
+ this.times = times;
+ }
+
+}
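
GetTimesRequest and GetTimesResponse form a simple query/reply pair: the client names a plugin and a time field, and the server answers with the list of available times. An illustrative sketch follows, under the same RequestRouter assumption as above; the plugin and field names are hypothetical, and the element type of the returned list is treated as Object here.

    // Illustrative sketch only; routing, names, and response handling are assumptions.
    import java.util.List;

    import com.raytheon.uf.common.serialization.comm.RequestRouter;

    import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetTimesRequest;
    import gov.noaa.nws.ncep.common.dataplugin.gempak.request.GetTimesResponse;

    public class GetTimesExample {

        public static void printTimes() throws Exception {
            GetTimesRequest request = new GetTimesRequest();
            request.setPluginName("gempak");              // hypothetical plugin name
            request.setTimeField("dataTime.refTime");     // hypothetical time field
            Object result = RequestRouter.route(request);
            if (result instanceof GetTimesResponse) {
                List<?> times = ((GetTimesResponse) result).getTimes();
                for (Object t : times) {
                    System.out.println(t);
                }
            }
        }
    }
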
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.java
new file mode 100644
index 0000000000..353bc18e4a
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.java
@@ -0,0 +1,85 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+
+@DynamicSerialize
+public class Station {
+ @DynamicSerializeElement
+ private String stationId;
+
+ @DynamicSerializeElement
+ private int wmoIndex;
+
+ @DynamicSerializeElement
+ private int elevation;
+
+ @DynamicSerializeElement
+ private String country;
+
+ @DynamicSerializeElement
+ private String state;
+
+ @DynamicSerializeElement
+ private double latitude;
+
+ @DynamicSerializeElement
+ private double longitude;
+
+ public String getStationId() {
+ return stationId;
+ }
+
+ public void setStationId(String stationId) {
+ this.stationId = stationId;
+ }
+
+ public int getWmoIndex() {
+ return wmoIndex;
+ }
+
+ public void setWmoIndex(int wmoIndex) {
+ this.wmoIndex = wmoIndex;
+ }
+
+ public int getElevation() {
+ return elevation;
+ }
+
+ public void setElevation(int elevation) {
+ this.elevation = elevation;
+ }
+
+ public String getCountry() {
+ return country;
+ }
+
+ public void setCountry(String country) {
+ this.country = country;
+ }
+
+ public String getState() {
+ return state;
+ }
+
+ public void setState(String state) {
+ this.state = state;
+ }
+
+ public double getLatitude() {
+ return latitude;
+ }
+
+ public void setLatitude(double latitude) {
+ this.latitude = latitude;
+ }
+
+ public double getLongitude() {
+ return longitude;
+ }
+
+ public void setLongitude(double longitude) {
+ this.longitude = longitude;
+ }
+
+}
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.java
new file mode 100644
index 0000000000..ceed7db9d9
--- /dev/null
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gempak/src/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.java
@@ -0,0 +1,69 @@
+package gov.noaa.nws.ncep.common.dataplugin.gempak.request;
+
+import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
+import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
+import com.raytheon.uf.common.serialization.comm.IServerRequest;
+import com.raytheon.uf.common.time.DataTime;
+
+@DynamicSerialize
+public class StationDataRequest implements IServerRequest {
+
+ @DynamicSerializeElement
+ private String pluginName;
+
+ @DynamicSerializeElement
+ private String stationId;
+
+ @DynamicSerializeElement
+ private DataTime refTime;
+
+ @DynamicSerializeElement
+ private String parmList;
+
+ @DynamicSerializeElement
+ private String partNumber;
+
+ public StationDataRequest() {
+ }
+
+ public String getPluginName() {
+ return pluginName;
+ }
+
+ public void setPluginName(String pluginName) {
+ this.pluginName = pluginName;
+ }
+
+ public String getStationId() {
+ return stationId;
+ }
+
+ public void setStationId(String stationId) {
+ this.stationId = stationId;
+ }
+
+ public DataTime getRefTime() {
+ return refTime;
+ }
+
+ public void setRefTime(DataTime refTime) {
+ this.refTime = refTime;
+ }
+
+ public String getParmList() {
+ return parmList;
+ }
+
+ public void setParmList(String parmList) {
+ this.parmList = parmList;
+ }
+
+ public String getPartNumber() {
+ return partNumber;
+ }
+
+ public void setPartNumber(String partNumber) {
+ this.partNumber = partNumber;
+ }
+
+}
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF
index 85a64c6f26..cf8ca1880d 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/META-INF/MANIFEST.MF
@@ -5,14 +5,14 @@ Bundle-SymbolicName: gov.noaa.nws.ncep.common.dataplugin.geomag
Bundle-Version: 1.0.0.qualifier
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Require-Bundle: com.raytheon.edex.common;bundle-version="1.12.1174",
- javax.persistence;bundle-version="1.0.0",
- org.junit;bundle-version="1.0.0"
+ javax.persistence;bundle-version="1.0.0"
Export-Package: gov.noaa.nws.ncep.common.dataplugin.geomag,
gov.noaa.nws.ncep.common.dataplugin.geomag.dao,
gov.noaa.nws.ncep.common.dataplugin.geomag.exception,
gov.noaa.nws.ncep.common.dataplugin.geomag.table,
gov.noaa.nws.ncep.common.dataplugin.geomag.util
Import-Package: com.raytheon.uf.common.localization,
+ gov.noaa.nws.ncep.common.dataplugin.geomag,
org.apache.commons.logging
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java
index ee02e937f6..b30cb712ff 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.geomag/src/gov/noaa/nws/ncep/common/dataplugin/geomag/GeoMagAvg.java
@@ -84,6 +84,7 @@ public class GeoMagAvg extends PersistableDataObject