17.1.1 build for OAX hydro db / localization

mjames-upc 2017-04-22 20:31:30 -06:00
parent 96742f9082
commit 6ee515d767
34 changed files with 734 additions and 327 deletions

View file

@ -1,4 +1,4 @@
export AW_SITE_IDENTIFIER=BOU
export AW_SITE_IDENTIFIER=OAX
export EDEX_SERVER=localhost
# database

View file

@ -144,7 +144,7 @@ class DeployInstall
String localizationProject = "localization"
DeployEdexSiteLocalization.deploy(this.edexRootDirectory,
this.projectInformationMap[localizationProject], "BOU")
this.projectInformationMap[localizationProject], "OAX")
}

View file

@ -1,5 +1,5 @@
<project default="main" basedir=".">
<property name="localization.sites" value="BOU" />
<property name="localization.sites" value="OAX" />
<property name="deploy.python" value="true" />
<property name="python.packages" value="pypies" />
<property name="edex.root" value="/awips2/edex" />

View file

@ -288,8 +288,8 @@
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.gfe.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.damagepath.feature" />
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.d2d.damagepath.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature" value="com.raytheon.uf.viz.gisdatastore.feature" />

View file

@ -160,7 +160,7 @@ The command **edex setup** attempts to add the domain name of your server.
- **/awips2/edex/bin/setup.env** should contain the fully-qualified domain name (externally resolvable); localhost will not work.
export AW_SITE_IDENTIFIER=BOU
export AW_SITE_IDENTIFIER=OAX
export EDEX_SERVER=edex-cloud.unidata.ucar.edu
- **/awips2/ldm/etc/ldmd.conf** contains the upstream server (default *idd.unidata.ucar.edu*, which requires that you connect from a .edu domain). This file also contains the **edexBridge** hostname (default *localhost*).
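For illustration only (not part of this commit), a minimal sketch of those two ldmd.conf entries, assuming the default Unidata hostnames; the request pattern is a placeholder and the exact lines vary by release:

    # request data from the upstream IDD server
    REQUEST ANY ".*" idd.unidata.ucar.edu
    # hand newly received products to the local EDEX server via edexBridge
    EXEC "edexBridge -s localhost"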

View file

@ -8,7 +8,7 @@
<property name="build.ws"
value="gtk" />
<property name="build.arch"
value="x86_86" />
value="x86" />
<property name="lightning"
value="false" />
@ -61,16 +61,10 @@
<param name="feature"
value="com.raytheon.uf.edex.grid.feature" />
</antcall>
<!--
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.ohd.feature" />
</antcall>
-->
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.archive.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.archive.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.text.feature" />
@ -79,10 +73,6 @@
<param name="feature"
value="com.raytheon.uf.edex.satellite.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.goesr.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.gfe.feature" />
@ -95,6 +85,10 @@
<param name="feature"
value="com.raytheon.uf.edex.radar.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.dat.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.config.auto.feature" />
@ -111,34 +105,62 @@
<param name="feature"
value="com.raytheon.uf.edex.ost.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.npp.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.request.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.client.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.remote.script.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.npp.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.request.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.client.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.registry.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.remote.script.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.binlightning.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="gov.noaa.nws.obs.edex.geodb.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="edu.wisc.ssec.cimss.edex.convectprob.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="gov.nasa.msfc.sport.edex.glmdecoder.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="gov.nasa.msfc.sport.edex.sportlma.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.ost.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="gov.noaa.nws.crh.edex.grib.decoderpostprocessor.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="gov.noaa.nws.obs.edex.geodb.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="gov.noaa.nws.sr.oun.edex.mping.feature" />
</antcall>
<antcall target="build">
<param name="feature"
value="com.raytheon.uf.edex.goesr.feature" />
</antcall>
</target>

View file

@ -16,7 +16,7 @@ deploy.gfesuite=true
esb.overwrite=false
# a colon (:) separated list of localization sites; the associated
# localization files will be deployed for the specified site.
localization.sites=BOU
localization.sites=OAX
# the python installation directory.
python.root.directory=/awips2/python
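As a hedged illustration of the colon-separated syntax described in the comment above (combining the new OAX site with the previous BOU value; not part of this commit):

    localization.sites=OAX:BOU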

View file

@ -129,6 +129,10 @@
#02/18/14 - Added section for run_report_alarm service configuration.
#04/10/14 - Added token for DualPol radar products processed in MPE, HPE/HPN
#05/05/2014 - #3026 added hpe_grib_input_dir token
#10/03/2016 - Added new tokens DB_ssl_cert_dir and DB_ssl_mode for SSL certificates
# to use to connect to the database.
#11/21/2016 - Change DB_ssl_mode to verify-ca
#02/23/2017 - set pguser=awips
# ==============================================================================
# To see syntax rules for this file, see the bottom of this file
@ -145,17 +149,10 @@
#$ tags. Token names and commented lines should start at column 1.
#AWIPS_MODIFICATION_BLOCK_BEGIN
fxa_local_site : BOU
mpe_site_id : bou
hv_center_lat : 40.0 # HydroView center latitude
hv_center_lon : -105.0 # HydroView center longitude
hv_map_width : 860 # HydroView map width (nautical miles)
whfs_primary_radar : FTG # WHFS primary radar id, for Stage II
whfs_tz : MST6MDT # WHFS time zone code
apps_dir : /awips2/edex/data/share/hydroapps # Hydrologic applications directory
apps_dir : $(SHARE_DIR)/hydroapps # Hydrologic applications directory
data_archive_root : /awips2/data_store # root directory of the data archive
data_archive_root : /data_store # root directory of the data archive
mcp3_icp_iface : $(HOME)/mcp3_ntrfc
#mcp3_icp_iface : /tmp/$(LOGNAME)/mcp3_ntrfc
@ -169,24 +166,24 @@ vsys_dir : $(apps_dir)/rfc/verify #base verify directory
#AWIPS_MODIFICATION_BLOCK_END
#===================== Apps/Script Execution Tokens =================================
WhfsSrv : OFF
WhfsSrv.purge_files : OFF
WhfsSrv.run_db_purge : OFF
WhfsSrv.run_floodseq : OFF
PprocSrv : OFF
PprocSrv.purge_mpe_files : OFF
PprocSrv.purge_hpe_file : OFF
MpeFieldGenSrv.run_mpe_fieldgen : OFF
WhfsSrv.run_pdc_pp : OFF
WhfsSrv.run_alarm_whfs : OFF
WhfsSrv.run_alarm_whfs.run_roc_checker : OFF
WhfsSrv.run_alarm_whfs.run_report_alarm : OFF
WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : OFF
ArealQpeGenSrv : OFF
DqcPreProcSrv : OFF
DqcPreProcSrv.run_dqc_preprocessor : OFF
MpeRUCFreezingLevel : OFF
MpeLightningSrv : OFF
WhfsSrv : ON
WhfsSrv.purge_files : ON
WhfsSrv.run_db_purge : ON
WhfsSrv.run_floodseq : ON
PprocSrv : ON
PprocSrv.purge_mpe_files : ON
PprocSrv.purge_hpe_file : ON
MpeFieldGenSrv.run_mpe_fieldgen : ON
WhfsSrv.run_pdc_pp : ON
WhfsSrv.run_alarm_whfs : ON
WhfsSrv.run_alarm_whfs.run_roc_checker : ON
WhfsSrv.run_alarm_whfs.run_report_alarm : ON
WhfsSrv.run_alarm_whfs.run_report_alarm.textdb : ON
ArealQpeGenSrv : ON
DqcPreProcSrv : ON
DqcPreProcSrv.run_dqc_preprocessor : ON
MpeRUCFreezingLevel : ON
MpeLightningSrv : ON
#====================================================================================
#===================== run_report_alarm Configuration ===============================
@ -247,18 +244,24 @@ mpe_display_maps : statesCounties
# database selection tokens
server_name : ONLINE # Informix database server name
db_name : hd_ob92oax # IHFS database name
damcat_db_name : dc_ob7oax # Dam Catalog database name
db_name : hd_ob92oax # IHFS database name
damcat_db_name : dc_ob5oax # Dam Catalog database name
hdb_db_name : ob81_histdata # Historical database.
pghost : localhost # The machine PostGres is running on
pguser : awips # The user allowed to access PostGres
pghost : localhost # The machine PostGres is running on
pguser : awips # The user allowed to access PostGres
pgport : 5432 # The PostGres Server port
adb_name : adb_ob7xxx # RFC archive database name
rax_pghost : localhost # The machine PostGres is running on for the adb
adb_name : adb_ob7oax # RFC archive database name
rax_pghost : localhost # The machine PostGres is running on for the adb
DB_ssl_cert_dir :$(HOME)/.postgresql # path to the ssl keys directory
DB_ssl_mode :verify-ca # if DB_ssl mode is not "disable"
# vacuum log dir token.
vacuum_log_dir : $(whfs_log_dir)/vacuum
# WHFS specific tokens
whfs_tz : EST5EDT # WHFS time zone for local time
whfs_primary_radar : TLX # WHFS primary radar id, for Stage II
# damcat tokens
damcat_hostoffice_type : wfo # source of run-from office
damcat_office_datasource : ohd # which data source is used
@ -284,27 +287,27 @@ damcrest_res_dir : $(whfs_config_dir)/damcrest
#===================== SHEFDECODE Application Tokens ================================
shefdecode_userid : oper # controlling UNIX user
shefdecode_host : localhost # controlling UNIX system.
shefdecode_host : dx1f # controlling UNIX system.
shefdecode_dir : $(apps_dir)/shefdecode # main directory location
shefdecode_bin : $(shefdecode_dir)/bin # executable programs location
shefdecode_input : $(shefdecode_dir)/input # SHEF parameter file location
shef_data_dir : /awips2/edex/data/shef # input products location
shef_data_dir : /data/fxa/ispan/hydro # input products location
shefdecode_log : $(shefdecode_dir)/logs/decoder # daily log files location
shef_error_dir : $(shefdecode_dir)/logs/product # product log files location
shef_keeperror : ALWAYS # keep product log files (=ALWAYS) or
# only when errors occur (=IF_ERROR)
shef_perflog : OFF # ON/OFF - create a separate performance log file to
shef_perflog : ON # ON/OFF - create a separate performance log file to
# save internal decoder timing messages for
# monitoring performance
shef_data_log : OFF # ON/OFF - include messages in the log file detailing
shef_data_log : ON # ON/OFF - include messages in the log file detailing
# the SHEF records
dupmess : OFF # ON/OFF - include messages in the log file about
dupmess : ON # ON/OFF - include messages in the log file about
# duplicate data
elgmess : OFF # ON/OFF - include messages in the log file about
elgmess : ON # ON/OFF - include messages in the log file about
# data types not found in IngestFilter or
# data types turned off in IngestFilter
locmess : OFF # ON/OFF - include messages in the log file about
locmess : ON # ON/OFF - include messages in the log file about
# stations and areas not found in Location
# or GeoArea
@ -317,7 +320,7 @@ shef_duplicate : IF_DIFFERENT # flag for handling duplicate date
# IF_DIFFERENT-overwrite if new value is different
# IF_DIFFERENT_OR_REVCODE-overwrite if new value is
# different or revcode is set
shef_load_ingest : OFF # ON/OFF - automatically load the IngestFilter table or not
shef_load_ingest : ON # ON/OFF - automatically load the IngestFilter table or not
# with (station id-PEDTSE) combinations as they
# arrive in the input data flow
shef_storetext : OFF # ON/OFF - post/don't post raw encoded SHEF text messages
@ -333,14 +336,14 @@ shef_post_baddata : REJECT # PE/REJECT - post data that have failed the gross
shef_procobs : OFF # ON/OFF - post Processed data values (i.e., TS=P*) to
# the observation data tables (=ON) or to
# the ProcValue table (=OFF)
shef_post_latest : OFF # ON/OFF - post/don't post data to the LatestObsValue table
shef_post_latest : ON # ON/OFF - post/don't post data to the LatestObsValue table
# VALID_ONLY - post data to the LatestObsValue table
# ONLY if the gross range check is passed
shef_post_link : OFF # ON/OFF - post/don't post data to the ProductLink table
shef_load_maxfcst : OFF # ON/OFF - after each product that resulted in forecast
shef_post_link : ON # ON/OFF - post/don't post data to the ProductLink table
shef_load_maxfcst : ON # ON/OFF - after each product that resulted in forecast
# height or discharge data being posted, load
# the maximum forecast data into the RiverStatus table
shef_alertalarm : OFF # ON/OFF - causes shefdecoder to screen data against
shef_alertalarm : ON # ON/OFF - causes shefdecoder to screen data against
# alert and alarm thresholds
# -- Intermediate output from ShefParser prior to post
shef_out : OFF
@ -348,7 +351,7 @@ shef_out : OFF
#===================== WHFS Applications Tokens ================================
whfs_base_dir : /awips2/edex/data/share/hydroapps/whfs # top of the WHFS tree
whfs_base_dir : $(apps_dir)/whfs # top of the WHFS tree
whfs_local_dir : $(whfs_base_dir)/local # top of WHFS local tree
whfs_local_data_dir : $(whfs_local_dir)/data # top of WHFS local data tree
whfs_local_grid_dir : $(whfs_local_data_dir)/grid # top of WHFS grids tree
@ -387,7 +390,7 @@ hb_gagrad_log_dir : $(whfs_log_dir)/create_gagradloc # gage-radar locator l
qcalarm_log_dir : $(whfs_log_dir)/qcalarm # batch QC logs
db_purge_log_dir : $(whfs_log_dir)/db_purge # db_purge token
db_purge_backup_retention_use : OFF # db_purge token for using backup retention value
db_purge_backup_retention_use : ON # db_purge token for using backup retention value
purge_files_log_dir : $(whfs_log_dir)/misc # purge_files token
@ -400,20 +403,23 @@ sws_home_dir : $(whfs_bin_dir)/pa # SWS dir
# -----------------------------------------------------------------
gage_pp_userid : oper # controlling UNIX user
gage_pp_host : localhost # controlling UNIX system
gage_pp_host : dx # controlling UNIX system
gage_pp_data : $(pproc_local_data)/gpp_input # input data files location
gage_pp_log : $(pproc_log)/gage_pp # daily log files location
gage_pp_sleep : 10 # sleep duration in seconds in between queries
gage_pp_enable : OFF # gpp enabled; shef uses to determine post
gage_pp_enable : ON # gpp enabled; shef uses to determine post
shef_post_precip : OFF # post to Precip/CurPrecip tables
build_hourly_enable : OFF # Enable the build_hourly application
build_hourly_enable : ON # Enable the build_hourly application
# ----------------------------------------------------------------
# The following tokens are most likely to be customized by the user
# (the first 4 MUST be customized at each site in the .Apps_defaults_site file)
# ----------------------------------------------------------------
hv_center_lat : 35.0 # HydroView center latitude
hv_center_lon : -97.8 # HydroView center longitude
hv_height_in_pixels : 900 # Hydroview map height in pixels
hv_width_in_pixels : 1200 # Hydroview map width in pixels
hv_map_width : 320 # HydroView map width (nautical miles)
hv_pointdata_display : ON # Hydroview point data display flag (ON, OFF)
hv_hours_in_window : 4 # Change window hours
hv_zoom_out_limit : 20 # Limits how far the map can be zoomed out
@ -464,11 +470,12 @@ event_expire_withinhr : 3 # in RiverPro
#=====Tokens To Generate Areal FFG from Mosaicked FFG Grids for Use By SSHP=====
# (NOTE: gaff_rfc_list MUST be customized at EVERY Field Office)
gaff_execution : OFF # ON/OFF token for the gen_areal_ffg process
gaff_execution : ON # ON/OFF token for the gen_areal_ffg process
# the gen_areal_ffg process is run from the
# process_dpa_files script at WFOs
gaff_rfc_list : MBRFC,NCRFC,NWRFC,CNRFC,CBRFC,WGRFC,ABRFC,LMRFC,SERFC,OHRFC,MARFC,NERFC # list of RFCs to be mosaicked
gaff_rfc_list : ABRFC,LMRFC # list of RFCs to be mosaicked
# list is comma separated, no embedded
# spaces are allowed
gaff_input_dir : $(EDEX_HOME)/data/processing
# directory containing gridded FFG
# generated by RFCs
@ -688,7 +695,7 @@ dpa_arch_dir : $(pproc_local_data)/stage1_archive # DPA archives
dpa_wind : 10
dpa_filter_decode : OFF # flag for non-top-of-hour
dpa_filter_decode : ON # flag for non-top-of-hour
# filtering of decoded products
# ON - filter products for decode
# OFF - do not filter (ie decode all products)
@ -887,6 +894,7 @@ mpe_fieldgen_product_dir : $(mpe_dir)
### MPE station list tokens
mpe_station_list_dir : $(mpe_app_dir)/station_lists
mpe_site_id : ounx
mpe_area_names : $(mpe_site_id)
### MPE static data files
@ -964,8 +972,8 @@ mpe_localfield3_dir : $(mpe_fieldgen_product_dir)/localfield3
### Tokens related to the MPE Editor map display.
mpe_config_dir : $(whfs_config_dir)
mpe_center_lat : 40.0
mpe_center_lon : -105.0
mpe_center_lat : 39.8
mpe_center_lon : -98.55
mpe_height_in_pixels : 900
mpe_width_in_pixels : 1200
mpe_map_width : 1320
@ -974,7 +982,7 @@ mpe_disclosure_limit : 60
mpe_map_projection : FLAT
### Misc tokens
mpe_load_hourlypc : OFF
mpe_load_hourlypc : ON
mpe_gageqc_gif_dir : $(whfs_image_dir)
mpe_gif_location : 34.0,-97.0,34.0,-94.0,33.0,-94.0
mpe_overlay_dir : $(whfs_geodata_dir)
@ -984,7 +992,7 @@ mpe_type_source : RG:GOES,RR:ALERT,RM:SNOTEL,RP:LARC,RZ:COOP
### New tokens for DQC/CHPS
mpe_level2_type_value : 2 # Allow user to customize the type value. The default is “2”
mpe_td_details_set : OFF # Allow generating a time distribution details file.
mpe_process_PC : OFF # Skip call to the load_PC_hourly routine if "OFF"
mpe_process_PC : ON # Skip call to the load_PC_hourly routine if "OFF"
mpe_map_one_zone : OFF # Allow MAP generation for one zone only
fewsgrib_dir : $(mpe_gageqc_dir)/fewsgrib # default nc2grib grib file output dir
nc2g_app_dir : $(mpe_app_dir)/nc2grib # directory for gfe2grib.txt file
@ -1035,7 +1043,7 @@ mpe_netcdf_id :
mpe_grib_id :
### mpe gage QC tokens
mpe_gage_qc : OFF
mpe_gage_qc : ON
mpe_sccqc_threshold : 2.0
mpe_scc_boxes_failed : 4
mpe_msc_precip_limit : 1.0
@ -1089,11 +1097,11 @@ lightning_log_dir : $(pproc_log)/lightning_proc
### tokens for D2D display
mpe_d2d_display_grib : OFF # ON/OFF token to determine if further
mpe_d2d_display_grib : ON # ON/OFF token to determine if further
# processing of grib file for D2D display
# is required
d2d_input_dir : /awips2/data_store/ingest/mpe # dir containing grib files
d2d_input_dir : $(EDEX_HOME)/data/manual/mpe # dir containing grib files
# to be processed for D2D display
mpe_send_grib : OFF # ON/OFF token to determine if grib file is
@ -1148,7 +1156,7 @@ dsp_error_dir : $(pproc_local_data)/dsp_error # DSP error files
dsp_arch_dir : $(pproc_local_data)/dsp_archive # DSP archives
hpe_grib_input_dir : /awips2/edex/data/local/hpegrib
hpe_grib_input_dir : $(EDEX_HOME)/data/local/hpegrib
hpe_generate_list : DHRMOSAIC,BDHRMOSAIC,ERMOSAIC,LSATPRE,EBMOSAIC
hpe_qpe_fieldtype : ERMOSAIC # field type to be saved as qpe
@ -1218,7 +1226,7 @@ dsp_duration : 60
hpe_base_radar_mosaic : ERMOSAIC
hpe_qpe_fieldtype : ERMOSAIC
hpe_load_misbin : OFF
hpe_debug_log : OFF
hpe_debug_log : ON
hpe_use_locbias : OFF
hpe_runfreq : 5
hpe_timelag : 5
@ -1228,11 +1236,11 @@ hpe_purge_logage : 720
hpe_purge_fileage : 180
hpe_purge_xmrgage : 75
dhrmosaic_d2d_display_grib : OFF
ermosaic_d2d_display_grib : OFF
ebmosaic_d2d_display_grib : OFF
bdhrmosaic_d2d_display_grib : OFF
hpe_run_nowcast : OFF
dhrmosaic_d2d_display_grib : ON
ermosaic_d2d_display_grib : ON
ebmosaic_d2d_display_grib : ON
bdhrmosaic_d2d_display_grib : ON
hpe_run_nowcast : ON
hpe_nowcast_generate_list : PRTM, BPTRM
hpe_nowcast_dir : $(hpe_output_dir)/nowcast
hpe_rate_save_grib : save
@ -1241,7 +1249,7 @@ hpe_tp1h_save_grib : save
hpe_btp1h_save_grib : save
hpe_4km_tp1h_save_grib : nosave
hpe_4km_btp1h_save_grib : nosave
nowcast_d2d_display_grib : OFF
nowcast_d2d_display_grib : ON
hpe_smooth_method : 1 # 0=no smoothing 1=FFP method (default) 2=BZ94 method
hpn_use_meanvelocity : OFF
hpn_meanvelocity_direction : 45 # direction precip is moving towards
@ -1664,18 +1672,18 @@ adb_shef_pro_err_dir : $(adb_logs_dir)/decoder/processed/err # pathname for
# product logs directory
adb_shef_pro_keeperror : ALWAYS # keep files (=ALWAYS) or only
# when errors occur (=IF_ERROR)
adb_shef_raw_checktab : OFF # ON checks location and ingestfilter tables
adb_shef_raw_checktab : ON # ON checks location and ingestfilter tables
adb_shef_pro_checktab : OFF # ON checks location and ingestfilter tables
adb_shef_duplicate_raw : USE_REVCODE # Token for allowing duplicate records to be
# posted for raw decoder.
adb_shef_duplicate_pro : USE_REVCODE # Same thing but for processed decoder.
adb_shef_raw_dupmess : OFF # duplication messages from adb raw decoder.
adb_shef_raw_locmess : OFF # invalid location messages from adb raw decoder.
adb_shef_raw_elgmess : OFF # invalid ingestfilter messages from adb raw
adb_shef_raw_dupmess : ON # duplication messages from adb raw decoder.
adb_shef_raw_locmess : ON # invalid location messages from adb raw decoder.
adb_shef_raw_elgmess : ON # invalid ingestfilter messages from adb raw
# decoder.
adb_shef_raw_storall : OFF # OFF - default- will only write to pecrsep table
# ON will write to both pecrsep and peirsep tables
adb_shef_pro_dupmess : OFF # duplication messages from adb processed decoder.
adb_shef_pro_dupmess : ON # duplication messages from adb processed decoder.
adb_shef_pro_locmess : OFF # invalid location messages from adb pro decoder.
adb_shef_pro_elgmess : OFF # invalid ingestfilter messages from adb pro
# decoder.
@ -1815,8 +1823,8 @@ sshp_ingest_xml_dir : $(local_data_sshp_dir)/ingest_xml
sshp_incoming_dir : $(local_data_sshp_dir)/incoming
sshp_outgoing_dir : $(local_data_sshp_dir)/outgoing
sshp_log_dir : $(whfs_log_dir)/sshp
sshp_java_process_host : localhost
sshp_invoke_map_preprocess: OFF
sshp_java_process_host : px1f
sshp_invoke_map_preprocess: ON
sshp_map_qpe_to_use : MIXED # choices are: MIXED, LOCAL_BEST_ONLY, RFC_ONLY
sshp_fcst_ts : FZ # SSHP type-source code for generated forecasts
sshp_initial_forecast_length: 24 # length of forecast in hours
@ -1878,7 +1886,7 @@ hdb_map_projection : FLAT # The initial map projection used by HDB.
# ====================== DHM Token =======================
dhm_data_dir : $(ofs_files)/$(ofs_level)/dhmdata # DHM data dir
dhm_d2d_data_dir : /data/fxa/Grid/LOCAL/netCDF/DHM # d2d data dir
dhm_d2d_notify_bin_dir : /awips2/fxa/bin # d2d notify bin dir
dhm_d2d_notify_bin_dir : /awips/fxa/bin # d2d notify bin dir
rdhm_input_dir : $(geo_data)
dhm_rain_plus_melt_data_dir: $(geo_data)
@ -1898,7 +1906,7 @@ daa_archive_window : 7 # number of minutes around top of hour
daa_wind : 5
daa_decode_window : 5
daa_filter_decode : OFF # whether time window filtering is on or off
daa_filter_decode : ON # whether time window filtering is on or off
daa_min_coverage_dur : 60

View file

@ -0,0 +1,223 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.ndm.dataplugin.subscriber;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import com.raytheon.uf.common.localization.ILocalizationFile;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.pointdata.vadriver.VA_Driver;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.ndm.ingest.INationalDatasetSubscriber;
import com.raytheon.uf.edex.plugin.modelsounding.decoder.ModelSoundingDataAdapter;
/**
* Subscriber to update the local model sounding sites whenever the national spi
* file changes.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------- -------- ----------- --------------------------
* Jan 29, 2011 bfarmer Initial creation
* Dec 02, 2013 2537 bsteffen Ensure streams are closed.
* Mar 06, 2014 2876 mpduff New NDM plugin.
* Mar 02, 2016 5434 bkowal Relocated to ndm dataplugin.
* Jul 11, 2016 5744 mapeters Save to common_static (not edex_static)
*
* </pre>
*
* @author bfarmer
*/
public class ModelBufrSubscriber implements INationalDatasetSubscriber {
private static final String MODEL_STATION_LIST = ModelSoundingDataAdapter.MODEL_STATION_LIST;
private static final String MODEL_STATION_INFO = "modelBufrStationInfo.txt";
private static final String MODEL_GOODNESS = "modelBufr.goodness";
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(ModelBufrSubscriber.class);
@Override
public void notify(String fileName, File file) {
statusHandler.handle(Priority.EVENTA,
"modelBufr:Processing input file [" + fileName + "]");
if ("modelBufr.spi".equals(fileName)) {
IPathManager pathMgr = PathManagerFactory.getPathManager();
LocalizationContext lc = pathMgr.getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.SITE);
ILocalizationFile outLocFile = pathMgr.getLocalizationFile(lc,
ModelSoundingDataAdapter.SPI_FILE);
saveFile(file, outLocFile);
ModelSoundingDataAdapter.updateSPIData();
} else if (MODEL_STATION_LIST.equals(fileName)
|| MODEL_STATION_INFO.equals(fileName)) {
// Both are saved as MODEL_STATION_LIST in localization
processModelStationTxtFile(file);
}
}
private void processModelStationTxtFile(File file) {
IPathManager pathMgr = PathManagerFactory.getPathManager();
LocalizationContext lc = pathMgr.getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.SITE);
ILocalizationFile outLocFile = pathMgr.getLocalizationFile(lc,
MODEL_STATION_LIST);
File goodnessFile = pathMgr.getFile(lc, MODEL_GOODNESS);
saveFile(file, outLocFile);
generateSPI(file, goodnessFile);
File spiFile = pathMgr.getFile(lc, ModelSoundingDataAdapter.SPI_FILE);
if (!spiFile.exists()) {
try {
spiFile.createNewFile();
} catch (IOException e) {
statusHandler.handle(
Priority.SIGNIFICANT,
"modelBufr:Could not create spiFile file: "
+ spiFile.getName(), e);
}
}
VA_Driver driver = new VA_Driver();
driver.setWeight(0.5f);
driver.vaStationsFile(goodnessFile, null, spiFile);
// updateStationList will reload spi files also
ModelSoundingDataAdapter.update();
}
/**
*
* @param file
* @param goodnessFile
*/
private void generateSPI(File file, File goodnessFile) {
String line;
String[] splitLine;
try {
try (BufferedReader fis = new BufferedReader(new FileReader(file));
BufferedWriter fos = new BufferedWriter(new FileWriter(
goodnessFile))) {
while ((line = fis.readLine()) != null) {
if (line.length() > 0) {
// check for commented lines
if ('#' != line.charAt(0)) {
try {
splitLine = line.split("\\|");
Integer elevation;
Double latitude;
Double longitude;
String cause = "elevation";
try {
elevation = Integer.parseInt(splitLine[4]
.trim());
cause = "latitude";
latitude = Double.parseDouble(splitLine[2]
.trim());
cause = "longitude";
longitude = Double.parseDouble(splitLine[3]
.trim());
} catch (NumberFormatException nfe) {
String err = String
.format("modelBufr:Invalid %s in data line [%s]",
cause, line);
statusHandler.handle(Priority.PROBLEM, err);
continue;
}
String stationName = splitLine[1].trim();
fos.write("0 ");
fos.write(stationName);
fos.write(String.format(" %8.4f %9.4f %5d %9d",
latitude, longitude, elevation, 0));
fos.newLine();
} catch (Exception e) {
String err = String.format(
"modelBufr:Error in data line [%s]",
line);
statusHandler.handle(Priority.PROBLEM, err, e);
continue;
}
}
}
}
}
} catch (IOException e) {
statusHandler.handle(Priority.SIGNIFICANT,
"modelBufr:Could not read File ", e);
}
}
/**
* Save the contents of the given File to the given ILocalizationFile
*
* @param file
* @param outFile
*/
private void saveFile(File file, ILocalizationFile outFile) {
if ((file != null) && file.exists()) {
try (BufferedReader fis = new BufferedReader(new FileReader(file));
BufferedWriter fos = new BufferedWriter(
new OutputStreamWriter(outFile.openOutputStream()))) {
String line = null;
try {
while ((line = fis.readLine()) != null) {
fos.write(line);
fos.newLine();
}
} catch (IOException e) {
statusHandler.handle(Priority.PROBLEM,
"Could not read file: " + file.getName(), e);
}
} catch (FileNotFoundException e) {
statusHandler.handle(Priority.PROBLEM, "Failed to find file: "
+ file.getName(), e);
} catch (LocalizationException e) {
statusHandler.handle(
Priority.PROBLEM,
"Failed to open output stream for file: "
+ outFile.getPath(), e);
} catch (IOException e) {
// Error occurred closing fis/fos, ignore
}
}
}
}

View file

@ -17,7 +17,7 @@
</bean>
<bean id="afosToAwipsListener" class="com.raytheon.uf.edex.plugin.text.dbsrv.ingest.AfosToAwipsListener">
<constructor-arg value="${afos2awips.legacyFileLocation}"/>
<constructor-arg value="/awips2/edex/data/utility/common_static/base/afos2awips/afos2awips.txt"/>
</bean>
<bean id="afosBrowserListener" class="com.raytheon.uf.edex.plugin.text.dbsrv.ingest.AfosBrowserModelSubscriber" />

View file

@ -1,2 +0,0 @@
# Where to write legacy afos2awips file
afos2awips.legacyFileLocation=/data/fxa/afos2awips.txt

View file

@ -4,7 +4,7 @@
# the input file contains 3 fields:
# 1) afos cccnnnxxx 2) wmo ttaaoo 3) awips cccc
# Last Modified: 01/31/2002
export FILENAME=/awips/fxa/data/afos2awips.txt
export FILENAME=/awips2/edex/data/utility/common_static/base/afos2awips/afos2awips.txt
if [[ $1 = "" ]]
then

View file

@ -5,7 +5,7 @@
# 1) afos cccnnnxxx 2) wmo ttaaoo 3) awips cccc
#
# Last Modified: 01/31/2002
export FILENAME=/awips/fxa/data/afos2awips.txt
export FILENAME=/awips2/edex/data/utility/common_static/base/afos2awips/afos2awips.txt
if [[ $1 = "" ]]
then

View file

@ -8,7 +8,7 @@
#
# Last Modified: 01/31/2002
#
export FILENAME=/awips/fxa/data/afos2awips.txt
export FILENAME=/awips2/edex/data/utility/common_static/base/afos2awips/afos2awips.txt
if [[ $1 = "" ]]
then

View file

@ -63,3 +63,5 @@ rm -rf ${RPM_BUILD_ROOT}
%defattr(644,awips,fxalpha,700)
%dir /awips2
%dir /awips2/data
/%files

View file

@ -25,7 +25,6 @@ Provides: awips2-static-user
Requires: libpng
Requires: awips2-postgresql
Requires: awips2-psql
Requires: awips2-database-configuration
Requires: netcdf = 4.1.2
Requires: netcdf-devel = 4.1.2
@ -46,6 +45,19 @@ fi
%build
%install
mkdir -p ${RPM_BUILD_ROOT}/awips2/data
if [ $? -ne 0 ]; then
exit 1
fi
PROJECT_DIR="Installer.database"
CONFIGURATION_DIR="rpms/awips2.core/${PROJECT_DIR}/configuration"
CONF_FILE="postgresql.conf"
cp %{_baseline_workspace}/${CONFIGURATION_DIR}/${CONF_FILE} \
${RPM_BUILD_ROOT}/awips2/data
mkdir -p ${RPM_BUILD_ROOT}/awips2/database
if [ $? -ne 0 ]; then
exit 1
@ -93,19 +105,9 @@ cp -r %{_baseline_workspace}/${PATH_TO_REPLICATION}/* \
touch ${RPM_BUILD_ROOT}/awips2/database/sqlScripts/share/sql/sql_install.log
%pre
# Verify that one of the official AWIPS II PostgreSQL configuration files exist.
if [ ! -f /awips2/data/postgresql.conf ]; then
echo "ERROR: /awips2/data/postgresql.conf does not exist. However, "
echo " the AWIPS II PostgreSQL Configuration RPM is installed. "
echo " If you recently uninstalled awips2-database and purged "
echo " the /awips2/data directory, you will need to re-install "
echo " the AWIPS II PostgreSQL configuration rpm so that the "
echo " postgresql.conf file will be restored."
exit 1
fi
if [ "${1}" = "2" ]; then
exit 0
# Remove any existing postgresql.conf files
if [ -f /awips2/data/postgresql.conf ]; then
rm -f /awips2/data/postgresql.conf
fi
%post
@ -310,12 +312,16 @@ copy_addl_config
rm -rf ${RPM_BUILD_ROOT}
%files
%defattr(644,awips,fxalpha,700)
%dir /awips2/data
%defattr(644,awips,fxalpha,755)
%dir /awips2
%dir /awips2/database
%dir /awips2/database/sqlScripts
%dir /awips2/database/replication
%dir /awips2/database/sqlScripts/share
/awips2/data/postgresql.conf
/awips2/database/sqlScripts/share/sql/sql_install.log
/awips2/database/sqlScripts/share/sql/pg_hba.conf
/awips2/database/replication/README

View file

@ -54,15 +54,15 @@ fi
%build
%install
mkdir -p %{_build_root}%{_installation_directory}/edex-environment/scripts
mkdir -p %{_build_root}%{_installation_directory}/edex/environment/scripts
if [ $? -ne 0 ]; then
exit 1
fi
mkdir -p %{_build_root}%{_installation_directory}/edex-environment/macro/utilities
mkdir -p %{_build_root}%{_installation_directory}/edex/environment/macro/utilities
if [ $? -ne 0 ]; then
exit 1
fi
mkdir -p %{_build_root}%{_installation_directory}/edex-environment/macro/functions
mkdir -p %{_build_root}%{_installation_directory}/edex/environment/macro/functions
if [ $? -ne 0 ]; then
exit 1
fi
@ -76,7 +76,7 @@ mkdir -p %{_build_root}/usr/local/edex-environment
# "install" the wes2bridge utilities
cd %{_baseline_workspace}/com.raytheon.wes2bridge.common
/awips2/ant/bin/ant -f build.xml \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex-environment/macro/utilities \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex/environment/macro/utilities \
-Declipse.directory=%{_uframe_eclipse} \
-Dbaseline.dir=%{_baseline_workspace}
if [ $? -ne 0 ]; then
@ -84,7 +84,7 @@ if [ $? -ne 0 ]; then
fi
cd %{_baseline_workspace}/com.raytheon.wes2bridge.configuration
/awips2/ant/bin/ant -f build.xml \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex-environment/macro/utilities \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex/environment/macro/utilities \
-Declipse.directory=%{_uframe_eclipse} \
-Dbaseline.dir=%{_baseline_workspace}
if [ $? -ne 0 ]; then
@ -92,7 +92,7 @@ if [ $? -ne 0 ]; then
fi
cd %{_baseline_workspace}/com.raytheon.wes2bridge.datalink
/awips2/ant/bin/ant -f build.xml \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex-environment/macro/utilities \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex/environment/macro/utilities \
-Declipse.directory=%{_uframe_eclipse} \
-Dbaseline.dir=%{_baseline_workspace}
if [ $? -ne 0 ]; then
@ -100,7 +100,7 @@ if [ $? -ne 0 ]; then
fi
cd %{_baseline_workspace}/com.raytheon.wes2bridge.manager
/awips2/ant/bin/ant -f build.xml \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex-environment/macro/utilities \
-Ddestination.directory=%{_build_root}%{_installation_directory}/edex/environment/macro/utilities \
-Declipse.directory=%{_uframe_eclipse} \
-Dbaseline.dir=%{_baseline_workspace}
if [ $? -ne 0 ]; then
@ -115,22 +115,22 @@ HTTPD_PYPIES_INITD="%{_baseline_workspace}/installers/RPMs/httpd-pypies/configur
# Copy the startup scripts.
cp ${POSTGRES_INITD} \
%{_build_root}%{_installation_directory}/edex-environment/scripts
%{_build_root}%{_installation_directory}/edex/environment/scripts
if [ $? -ne 0 ]; then
exit 1
fi
cp ${QPID_INITD} \
%{_build_root}%{_installation_directory}/edex-environment/scripts
%{_build_root}%{_installation_directory}/edex/environment/scripts
if [ $? -ne 0 ]; then
exit 1
fi
cp ${EDEX_INITD} \
%{_build_root}%{_installation_directory}/edex-environment/scripts
%{_build_root}%{_installation_directory}/edex/environment/scripts
if [ $? -ne 0 ]; then
exit 1
fi
cp ${HTTPD_PYPIES_INITD} \
%{_build_root}%{_installation_directory}/edex-environment/scripts
%{_build_root}%{_installation_directory}/edex/environment/scripts
if [ $? -ne 0 ]; then
exit 1
fi
@ -140,12 +140,12 @@ DELIVERABLES="${RPM_PROJECT}/awips2.core/Installer.edex-environment/wes2bridge.f
# Macro and functions.
cp ${DELIVERABLES}/scripts/edex-environment \
%{_build_root}%{_installation_directory}/edex-environment/macro
%{_build_root}%{_installation_directory}/edex/environment/macro
if [ $? -ne 0 ]; then
exit 1
fi
cp ${DELIVERABLES}/scripts/functions/*.sh \
%{_build_root}%{_installation_directory}/edex-environment/macro/functions
%{_build_root}%{_installation_directory}/edex/environment/macro/functions
if [ $? -ne 0 ]; then
exit 1
fi
@ -162,14 +162,14 @@ fi
%dir /usr/local/edex-environment
%defattr(644,root,root,755)
%dir /awips2/edex-environment
%dir /awips2/edex-environment/scripts
/awips2/edex-environment/scripts/*
%dir /awips2/edex-environment/macro
%dir /awips2/edex-environment/macro/utilities
/awips2/edex-environment/macro/utilities/*
%dir /awips2/edex-environment/macro/functions
/awips2/edex-environment/macro/functions/*
%dir /awips2/edex/environment
%dir /awips2/edex/environment/scripts
/awips2/edex/environment/scripts/*
%dir /awips2/edex/environment/macro
%dir /awips2/edex/environment/macro/utilities
/awips2/edex/environment/macro/utilities/*
%dir /awips2/edex/environment/macro/functions
/awips2/edex/environment/macro/functions/*
%defattr(700,root,root,755)
/awips2/edex-environment/macro/edex-environment
/awips2/edex/environment/macro/edex-environment

View file

@ -1,6 +1,6 @@
#!/bin/csh
set EDEX_ENV_PATH="/awips2/edex-environment/macro"
set EDEX_ENV_PATH="/awips2/edex/environment/macro"
if $?PATH then
setenv PATH ${EDEX_ENV_PATH}:$PATH

View file

@ -1,6 +1,6 @@
#!/bin/bash
EDEX_ENV_PATH="/awips2/edex-environment/macro"
EDEX_ENV_PATH="/awips2/edex/environment/macro"
CHECK_PATH=`echo ${PATH} | grep ${EDEX_ENV_PATH}`
if [ ! "${CHECK_PATH}" = "" ]; then

View file

@ -1,7 +1,7 @@
#!/bin/bash
# edex-environment environment
dir="/awips2/edex-environment/macro"
dir="/awips2/edex/environment/macro"
export EDEX_ENV_DIR="/usr/local/edex-environment"
export UTILITIES="${dir}/utilities"

View file

@ -1,61 +0,0 @@
#
# AWIPS II localapps Environment Spec File
#
Name: awips2-localapps-environment
Summary: AWIPS II localapps Environment Spec File
Version: %{_component_version}
Release: %{_component_release}
Group: AWIPSII
BuildRoot: /tmp
BuildArch: noarch
URL: N/A
License: N/A
Distribution: N/A
Vendor: %{_build_vendor}
Packager: %{_build_site}
AutoReq: no
Provides: awips2-localapps-environment
%description
AWIPS II localapps Environment Spec File - This rpm will install
a shell script in /etc/profile.d that when sourced will set all
of the environment variables required by the "localapps".
%prep
# Verify That The User Has Specified A BuildRoot.
if [ "${RPM_BUILD_ROOT}" = "/tmp" ]
then
echo "An Actual BuildRoot Must Be Specified. Use The --buildroot Parameter."
echo "Unable To Continue ... Terminating"
exit 1
fi
%build
%install
mkdir -p ${RPM_BUILD_ROOT}/etc/profile.d
if [ $? -ne 0 ]; then
exit 1
fi
LOCALAPPS_RPM_DIR="rpms/awips2.core/Installer.localapps-environment"
PROFILED_DIR="${LOCALAPPS_RPM_DIR}/scripts/profile.d"
# Copy the profile.d scripts.
cp %{_baseline_workspace}/${PROFILED_DIR}/* ${RPM_BUILD_ROOT}/etc/profile.d
if [ $? -ne 0 ]; then
exit 1
fi
%pre
%post
%preun
%postun
%clean
rm -rf ${RPM_BUILD_ROOT}
%files
%defattr(755,awips,fxalpha,-)
/etc/profile.d/*

View file

@ -1,18 +0,0 @@
#!/bin/csh
setenv LOCALAPPS_HOME "/localapps"
setenv LOCALAPPS_RUN "${LOCALAPPS_HOME}/runtime"
setenv LOCALAPPS_DEV "${LOCALAPPS_HOME}/dev"
setenv LOCALAPPS_LIB_java "${LOCALAPPS_HOME}/lib/java"
setenv LOCALAPPS_LIB_javascript "${LOCALAPPS_HOME}/lib/javascript"
setenv LOCALAPPS_LIB_perl "${LOCALAPPS_HOME}/lib/perl"
setenv LOCALAPPS_LIB_python "${LOCALAPPS_HOME}/lib/python"
setenv LOCALAPPS_LIB_shell "${LOCALAPPS_HOME}/lib/shell"
setenv LOCALAPPS_LIB_tcl "${LOCALAPPS_HOME}/lib/tcl"
setenv LOCALAPPS_LOGS "${LOCALAPPS_HOME}/logs"
if $?PYTHONPATH then
setenv PYTHONPATH /awips2/fxa/bin/src:$PYTHONPATH
else
setenv PYTHONPATH /awips2/fxa/bin/src
endif

View file

@ -1,23 +0,0 @@
#!/bin/bash
export LOCALAPPS_HOME=/localapps
export LOCALAPPS_RUN=${LOCALAPPS_HOME}/runtime
export LOCALAPPS_DEV=${LOCALAPPS_HOME}/dev
export LOCALAPPS_LIB_java=${LOCALAPPS_HOME}/lib/java
export LOCALAPPS_LIB_javascript=${LOCALAPPS_HOME}/lib/javascript
export LOCALAPPS_LIB_perl=${LOCALAPPS_HOME}/lib/perl
export LOCALAPPS_LIB_python=${LOCALAPPS_HOME}/lib/python
export LOCALAPPS_LIB_shell=${LOCALAPPS_HOME}/lib/shell
export LOCALAPPS_LIB_tcl=${LOCALAPPS_HOME}/lib/tcl
export LOCALAPPS_LOGS=${LOCALAPPS_HOME}/logs
CHECK_PATH=`echo ${PYTHONPATH} | grep "/awips2/fxa/bin/src"`
if [ ! "${CHECK_PATH}" = "" ]; then
return
fi
# Update PYTHONPATH
if [ "${PYTHONPATH}" = "" ]; then
export PYTHONPATH=/awips2/fxa/bin/src
else
export PYTHONPATH=/awips2/fxa/bin/src:${PYTHONPATH}
fi

View file

@ -16,7 +16,7 @@ Packager: %{_build_site}
AutoReq: no
Provides: %{_component_name}
Requires: awips2-edex, awips2-edex-radar, awips2-edex-upc
Requires: awips2-edex, awips2-edex-upc
Requires: awips2-edex-shapefiles
Obsoletes: awips2-localization-OAX < 16.1.4
@ -43,6 +43,7 @@ if [ -d ${RPM_BUILD_ROOT} ]; then
fi
%build
# Build all WFO site localization Map Scales (Regional.xml and WFO.xml)
BUILD_DIR=%{_baseline_workspace}/rpms/awips2.core/Installer.localization/
UTIL=%{_baseline_workspace}/localization/utility
@ -73,7 +74,6 @@ do
mkdir -p ~/awips2-builds/localization/localization/utility/cave_static/site/$site
cp -R $BUILD_DIR/utility/cave_static/* ~/awips2-builds/localization/localization/utility/cave_static/site/$site
grep -rl 'LOWX' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/LOWX/'$lowx'/g'
grep -rl 'HIGHX' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/HIGHX/'$highx'/g'
grep -rl 'LOWY' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/LOWY/'$lowy'/g'
@ -118,6 +118,7 @@ COMMON_DIR=$UTIL/common_static
mkdir -p $COMMON_DIR
cp -R $BUILD_DIR/utility/common_static/* $COMMON_DIR/
%install
if [ ! -d %{_baseline_workspace}/%{_localization_directory} ]; then
echo "ERROR: The specified localization directory does not exist - %{_localization_directory}."
@ -139,7 +140,15 @@ fi
%pre
%post
# only import the shapefiles and/or hydro databases, if we are on
# the same machine as the db.
# verify the following exists:
# 1) /awips2/data/maps
# 2) /awips2/postgresql/bin/postmaster
# 3) /awips2/postgresql/bin/pg_ctl
# 4) /awips2/psql/bin/psql
# 5) /awips2/database/sqlScripts/share/sql/maps/importShapeFile.sh
# 6) /awips2/postgresql/bin/pg_restore
if [ ! -d /awips2/data/maps ] ||
[ ! -f /awips2/postgresql/bin/postmaster ] ||
[ ! -f /awips2/postgresql/bin/pg_ctl ] ||
@ -161,57 +170,268 @@ chmod 666 ${log_file}
edex_utility="/awips2/edex/data/utility"
I_STARTED_POSTGRESQL="NO"
POSTGRESQL_RUNNING="NO"
a2_postmaster="/awips2/postgresql/bin/postmaster"
a2_pg_ctl="/awips2/postgresql/bin/pg_ctl"
DB_OWNER=`ls -l /awips2/ | grep -w 'data' | awk '{print $3}'`
echo "Determining if PostgreSQL is running ..." >> ${log_file}
su ${DB_OWNER} -c \
"${a2_pg_ctl} status -D /awips2/data >> ${log_file} 2>&1"
RC=$?
echo "" >> ${log_file}
# start PostgreSQL if it is not running as the user that owns data
if [ ${RC} -eq 0 ]; then
echo "INFO: PostgreSQL is running." >> ${log_file}
else
echo "Starting PostgreSQL as User: ${DB_OWNER} ..." >> ${log_file}
su ${DB_OWNER} -c \
"${a2_postmaster} -D /awips2/data >> ${log_file} 2>&1 &"
if [ $? -ne 0 ]; then
echo "FATAL: Failed to start PostgreSQL." >> ${log_file}
function prepare()
{
if [ "${POSTGRESQL_RUNNING}" = "YES" ]; then
return 0
fi
# give PostgreSQL time to start.
/bin/sleep 10
I_STARTED_POSTGRESQL="YES"
fi
POSTGRESQL_RUNNING="YES"
local a2_postmaster="/awips2/postgresql/bin/postmaster"
local a2_pg_ctl="/awips2/postgresql/bin/pg_ctl"
# retrieve the owner of the database
DB_OWNER=`ls -l /awips2/ | grep -w 'data' | awk '{print $3}'`
# determine if PostgreSQL is running
I_STARTED_POSTGRESQL="NO"
echo "Determining if PostgreSQL is running ..." >> ${log_file}
su ${DB_OWNER} -c \
"${a2_pg_ctl} status -D /awips2/data >> ${log_file} 2>&1"
RC=$?
echo "" >> ${log_file}
# start PostgreSQL if it is not running as the user that owns data
if [ ${RC} -eq 0 ]; then
echo "INFO: PostgreSQL is running." >> ${log_file}
else
echo "Starting PostgreSQL as User: ${DB_OWNER} ..." >> ${log_file}
su ${DB_OWNER} -c \
"${a2_postmaster} -D /awips2/data >> ${log_file} 2>&1 &"
if [ $? -ne 0 ]; then
echo "FATAL: Failed to start PostgreSQL." >> ${log_file}
return 0
fi
# give PostgreSQL time to start.
/bin/sleep 10
I_STARTED_POSTGRESQL="YES"
fi
POSTGRESQL_RUNNING="YES"
return 0
}
function restartPostgreSQL()
{
if [ "${POSTGRESQL_RUNNING}" = "NO" ]; then
return 0
fi
local a2_pg_ctl="/awips2/postgresql/bin/pg_ctl"
# retrieve the owner of the database
DB_OWNER=`ls -l /awips2/ | grep -w 'data' | awk '{print $3}'`
echo "Restarting PostgreSQL ..." >> ${log_file}
su ${DB_OWNER} -c \
"${a2_pg_ctl} restart -D /awips2/data" >> ${log_file}
sleep 20
echo "PostgreSQL restart complete ..." >> ${log_file}
}
function importShapefiles()
{
local site_directory="${edex_utility}/common_static/site/OAX"
# determine if we include ffmp shapefiles
local ffmp_shp_directory="${site_directory}/shapefiles/FFMP"
# if we do not, halt
if [ ! -d ${ffmp_shp_directory} ]; then
return 0
fi
# shapefiles exist
prepare
# verify that both the basins and streams shapefiles are present.
if [ ! -f ${ffmp_shp_directory}/FFMP_aggr_basins.shp ] ||
[ ! -f ${ffmp_shp_directory}/FFMP_ref_sl.shp ]; then
# if they are not, exit
return 0
fi
# verify that the files the streams and basins shapefile depend on
# are present.
if [ ! -f ${ffmp_shp_directory}/FFMP_aggr_basins.dbf ] ||
[ ! -f ${ffmp_shp_directory}/FFMP_aggr_basins.shx ] ||
[ ! -f ${ffmp_shp_directory}/FFMP_ref_sl.dbf ] ||
[ ! -f ${ffmp_shp_directory}/FFMP_ref_sl.shx ]; then
# if they are not, exit
return 0
fi
local a2_shp_script="/awips2/database/sqlScripts/share/sql/maps/importShapeFile.sh"
echo "Importing the FFMP Shapefiles ... Please Wait."
/bin/date >> ${log_file}
echo "Preparing to import the FFMP shapefiles ..." >> ${log_file}
echo "" >> ${log_file}
# import the shapefiles; log the output
# import the ffmp basins
/bin/bash ${a2_shp_script} \
${ffmp_shp_directory}/FFMP_aggr_basins.shp \
mapdata ffmp_basins 0.064,0.016,0.004,0.001 \
awips 5432 /awips2 >> ${log_file} 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: failed to import the FFMP basins." >> ${log_file}
return 0
fi
# import the ffmp streams
/bin/bash ${a2_shp_script} \
${ffmp_shp_directory}/FFMP_ref_sl.shp \
mapdata ffmp_streams 0.064,0.016,0.004,0.001 \
awips 5432 /awips2 >> ${log_file} 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: failed to import the FFMP streams." >> ${log_file}
return 0
fi
# indicate success
echo "INFO: The FFMP shapefiles were successfully imported." >> ${log_file}
return 0
}
function removeHydroDbDirectory()
{
# remove the hydro db directory since it is not officially part
# of the localization.
local site_directory="${edex_utility}/common_static/site/OAX"
local hydro_db_directory="${site_directory}/hydro/db"
if [ -d ${hydro_db_directory} ]; then
rm -rf ${hydro_db_directory}
if [ $? -ne 0 ]; then
echo "WARNING: Failed to remove hydro db directory from localization."
echo " Please remove directory manually: ${hydro_db_directory}."
fi
fi
return 0
}
function restoreHydroDb()
{
local site_directory="${edex_utility}/common_static/site/OAX"
# determine if we include the hydro databases
local hydro_db_directory="${site_directory}/hydro/db"
# if we do not, halt
if [ ! -d ${hydro_db_directory} ]; then
return 0
fi
# hydro databases exist
prepare
# verify that the hydro database definition is present
if [ ! -f ${hydro_db_directory}/hydroDatabases.sh ]; then
return 0
fi
# discover the hydro databases
source ${hydro_db_directory}/hydroDatabases.sh
# ensure that the expected information has been provided
if [ "${DAMCAT_DATABASE}" = "" ] ||
[ "${DAMCAT_SQL_DUMP}" = "" ] ||
[ "${IHFS_DATABASE}" = "" ] ||
[ "${IHFS_SQL_DUMP}" = "" ]; then
echo "Sufficient information has not been provided for the Hydro Restoration!" \
>> ${log_file}
return 0
fi
# ensure that the specified databases are available for import
if [ ! -f ${hydro_db_directory}/${DAMCAT_DATABASE} ] ||
[ ! -f ${hydro_db_directory}/${IHFS_DATABASE} ]; then
echo "The expected Hydro Database Exports are not present!" >> ${log_file}
return 0
fi
# update pg_hba.conf
local default_damcat="dc_ob7oax"
local default_ihfs="hd_ob83oax"
local pg_hba_conf="/awips2/data/pg_hba.conf"
# update the entry for the damcat database
perl -p -i -e "s/${default_damcat}/${DAMCAT_DATABASE}/g" ${pg_hba_conf}
if [ $? -ne 0 ]; then
echo "Failed to update damcat database in ${pg_hba_conf}!" >> ${log_file}
return 0
fi
# update the entry for the ihfs database
perl -p -i -e "s/${default_ihfs}/${IHFS_DATABASE}/g" ${pg_hba_conf}
if [ $? -ne 0 ]; then
echo "Failed to update ihfs database in ${pg_hba_conf}!" >> ${log_file}
return 0
fi
# prepare PostgreSQL
restartPostgreSQL
echo "Restoring the Hydro Databases ... Please Wait."
/bin/date >> ${log_file}
echo "Preparing to restore the Hydro databases ..." >> ${log_file}
local a2_pg_restore="/awips2/postgresql/bin/pg_restore"
# perform the restoration
echo "Restoring Database ${DAMCAT_DATABASE} ..." >> ${log_file}
${a2_pg_restore} -U awips -C -d postgres ${hydro_db_directory}/${DAMCAT_DATABASE} \
>> ${log_file} 2>&1
# do not check the return code because any errors encountered during
# the restoration may cause the return code to indicate a failure even
# though the database was successfully restored.
echo "" >> ${log_file}
echo "Restoring Database ${IHFS_DATABASE} ..." >> ${log_file}
${a2_pg_restore} -U awips -C -d postgres ${hydro_db_directory}/${IHFS_DATABASE} \
>> ${log_file} 2>&1
# do not check the return code because any errors encountered during
# the restoration may cause the return code to indicate a failure even
# though the database was successfully restored.
# indicate success
echo "INFO: The Hydro databases were successfully restored." >> ${log_file}
}
importShapefiles
restoreHydroDb
removeHydroDbDirectory
static_shp_directory="${edex_utility}/edex_static/base/shapefiles"
a2_shp_script="/awips2/database/sqlScripts/share/sql/maps/importShapeFile.sh"
/bin/bash ${a2_shp_script} \
${static_shp_directory}/NHAdomain/NHAdomain.shp mapdata nhadomain >> ${log_file} 2>&1
${static_shp_directory}/NHAdomain/NHAdomain.shp mapdata nhadomain >> ${log_file} 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: failed to import NHAdomain." >> ${log_file}
return 0
fi
/bin/bash ${a2_shp_script} \
${static_shp_directory}/StormSurgeWW/StormSurgeWW.shp mapdata stormsurgeww >> ${log_file} 2>&1
${static_shp_directory}/StormSurgeWW/StormSurgeWW.shp mapdata stormsurgeww >> ${log_file} 2>&1
if [ $? -ne 0 ]; then
echo "FATAL: failed to import StormSurgeWW." >> ${log_file}
return 0
fi
echo "INFO: NHAdomain and StormSurgeWW shapefiles were successfully imported." >> ${log_file}
PSQL="/awips2/psql/bin/psql"
echo "Updating metadata.radar_spatial from common_static/base/radar/radarSpatial.sql"
${PSQL} -U awips -d metadata -q -f /awips2/edex/data/utility/common_static/base/radar/radarSpatial.sql >> ${log_file} 2>&1
a2_pg_ctl="/awips2/postgresql/bin/pg_ctl"
# if we started PostgreSQL, shutdown PostgreSQL
if [ "${I_STARTED_POSTGRESQL}" = "YES" ]; then
echo "" >> ${log_file}
su ${DB_OWNER} -c \
"${a2_pg_ctl} stop -D /awips2/data" >> ${log_file}
if [ $? -ne 0 ]; then

View file

@ -8,7 +8,7 @@
Name: awips2-tools
Summary: AWIPS II Tools Distribution
Version: %{_component_version}
Release: 1.8.5%{?dist}
Release: 1%{?dist}
Group: AWIPSII
BuildRoot: %{_build_root}
BuildArch: %{_build_arch}

View file

@ -16,7 +16,7 @@
Name: %{_component_name}
Summary: awips2 Installation
Version: %{_component_version}
Release: %{_component_release}%{?dist}
Release: %{_component_release}
Group: AWIPSII
BuildRoot: /tmp
BuildArch: noarch

View file

@ -160,7 +160,6 @@ cd ../
buildRPM "Installer.version"
buildRPM "Installer.gfesuite-client"
buildRPM "Installer.gfesuite-server"
buildRPM "Installer.database-standalone-configuration"
buildRPM "Installer.adapt-native"
buildRPM "Installer.alertviz"
buildRPM "Installer.aviation"
@ -173,8 +172,6 @@ buildRPM "Installer.gfe"
buildRPM "Installer.hydroapps"
buildRPM "Installer.notification"
buildRPM "Installer.pypies"
buildRPM "Installer.rcm"
buildRPM "Installer.localapps-environment"
buildRPM "Installer.edex-configuration"
unset AWIPSII_VERSION

View file

@ -4,7 +4,7 @@
Name: awips2-edex-shapefiles
Summary: AWIPS II Edex
Version: %{_component_version}
Release: %{_component_release}%{?dist}
Release: %{_component_release}
Group: AWIPSII
BuildRoot: %{_build_root}
BuildArch: noarch

View file

@ -15,7 +15,7 @@ Vendor: %{_build_vendor}
Packager: %{_build_site}
AutoReq: no
Requires: awips2-tools, awips2-edex-configuration, awips2-database-standalone-configuration
Requires: awips2-tools, awips2-edex-configuration, awips2-database
Requires: awips2-postgresql, awips2-ldm
Requires: ksh, libstdc++(x86-32), libgfortran(x86-32)

View file

@ -207,7 +207,7 @@ edit_pg() { # edex pg_hba.conf
schStr=`grep "\/24" $PG_FILE | head -1 | awk '{ print $4 }' | cut -d/ -f1`
subStr=$IP_CIDR'.0'
sed -i.setup_$YMD 's/'$schStr'/'$subStr'/g' $PG_FILE
echo '[edit] '$subStr' added to '$iPG_FILE
echo '[edit] '$subStr' added to '$PG_FILE
echo ' File backed up to '$PG_FILE'.setup_'$YMD
}

View file

@ -179,10 +179,6 @@ function lookupRPM()
export RPM_SPECIFICATION="${awips2_core_dir}/Installer.hydroapps"
return 0
fi
if [ "${1}" = "awips2-localapps-environment" ]; then
export RPM_SPECIFICATION="${awips2_core_dir}/Installer.localapps-environment"
return 0
fi
if [ "${1}" = "-localization" ]; then
export RPM_SPECIFICATION="${awips2_core_dir}/Installer.localization"
return 0

View file

@ -138,11 +138,12 @@ if [ "${1}" = "-python" ]; then
fi
if [ "${1}" = "-database" ]; then
buildRPM "awips2-database-standalone-configuration"
buildRPM "awips2-database"
buildRPM "awips2-postgresql"
#buildRPM "awips2-database"
#buildRPM "awips2-postgresql"
buildRPM "awips2-maps-database"
buildRPM "awips2-ncep-database"
#buildRPM "awips2-data.hdf5-topo"
#buildRPM "awips2-data.gfe"
fi
if [ "${1}" = "-edex" ]; then
@ -152,8 +153,35 @@ if [ "${1}" = "-edex" ]; then
buildRPM "awips2-edex-upc"
fi
if [ "${1}" = "-rh6" ]; then
if [ "${1}" = "-server" ]; then
#buildEDEX
buildRPM "awips2"
buildRPM "awips2-java"
buildRPM "awips2-pypies"
buildRPM "awips2-python-awips"
buildRPM "awips2-python-gfe"
buildRPM "awips2-ldm"
buildRPM "awips2-tools"
#buildRPM "awips2-notification"
buildRPM "awips2-hydroapps-shared"
buildRPM "awips2-common-base"
buildRPM "awips2-httpd-pypies"
buildLocalizationRPMs
buildRPM "awips2-adapt-native"
buildRPM "awips2-aviation-shared"
buildRPM "awips2-cli"
buildRPM "awips2-edex-environment"
buildRPM "awips2-edex-shapefiles"
buildRPM "awips2-edex-upc"
buildRPM "awips2-data.gfe"
buildRPM "awips2-gfesuite-client"
buildRPM "awips2-gfesuite-server"
buildRPM "awips2-data.hdf5-topo"
buildRPM "awips2-yajsw"
fi
if [ "${1}" = "-rh6" ]; then
##buildEDEX
#buildRPM "awips2"
#buildRPM "awips2-java"
#buildRPM "awips2-pypies"
@ -162,8 +190,8 @@ if [ "${1}" = "-rh6" ]; then
#buildRPM "awips2-ldm"
#buildRPM "awips2-tools"
#buildRPM "awips2-notification"
buildRPM "awips2-hydroapps-shared"
buildRPM "awips2-postgresql"
#buildRPM "awips2-hydroapps-shared"
##buildRPM "awips2-postgresql"
buildRPM "awips2-common-base"
buildRPM "awips2-ant"
buildRPM "awips2-maven"
@ -172,7 +200,6 @@ if [ "${1}" = "-rh6" ]; then
buildRPM "awips2-qpid-lib"
buildRPM "awips2-qpid-java"
buildRPM "awips2-qpid-java-broker"
buildRPM "awips2-database-standalone-configuration"
buildRPM "awips2-database"
buildRPM "awips2-postgresql"
buildRPM "awips2-maps-database"
@ -188,7 +215,6 @@ if [ "${1}" = "-rh6" ]; then
buildRPM "awips2-gfesuite-client"
buildRPM "awips2-gfesuite-server"
buildRPM "awips2-groovy"
buildRPM "awips2-localapps-environment"
buildRPM "awips2-data.hdf5-topo"
buildRPM "awips2-yajsw"
exit 0

View file

@ -1,8 +1,8 @@
#!/bin/bash -v
set -xe
#if [ ${2} = "buildCAVE"]; then
if [ ${2} = "buildCAVE"]; then
rm -rf /awips2/jenkins/buildspace/workspace/AWIPS2-UPC_build/baseline/
#fi
fi
export JENKINS_WORKSPACE=/awips2/repo/awips2-builds
# determine where we are ...

View file

@ -13,6 +13,15 @@ javaUtilities/* rpms pythonPackages nativeLib/*
../awips2-nws/viz/*
../awips2-nws/features/*
../awips2-nws/edex/*
../awips2-nasa/edex/*
../awips2-nasa/features/*
../awips2-cimss/common/*
../awips2-cimss/viz/*
../awips2-cimss/features/*
../awips2-cimss/edex/*
../awips2-ohd/lib/*
../awips2-ohd/features/*
../awips2-ohd/edex/*
../awips2-ncep/common/*
../awips2-ncep/viz/*
../awips2-ncep/features/*
@ -21,3 +30,5 @@ javaUtilities/* rpms pythonPackages nativeLib/*
../awips2-gsd/viz/*
../awips2-goesr/cave/*
../awips2-goesr/edexOsgi/*
../awips2-drawing/features/*
../awips2-drawing/viz/*