Merge branch 'master_14.4.1' into field_14.4.1
Change-Id: I4ffce1e09a6ad2c8a2428907a8b8e2bcedcefe4c
Former-commit-id: 0fa5c4e483 [formerly ea2276a9ac116613b032828e4a4b82f67fc6ba8d]
Former-commit-id: b377106409
commit 6749c3e52f
242 changed files with 12214 additions and 22643 deletions
.gitignore (vendored): 1 change
@@ -7,6 +7,7 @@ testBin/
 bin-test/
 *.class
 *.pyo
+*.pyc
 *.o
 *.orig
@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
    This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
    pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.

    U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
    This_software_product_contains_export-restricted_data_whose
    export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
    to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
    an_export_license_or_other_authorization.

    Contractor_Name:________Raytheon_Company
    Contractor_Address:_____6825_Pine_Street,_Suite_340
    ________________________Mail_Stop_B8
    ________________________Omaha,_NE_68106
    ________________________402.291.0100

    See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
    further_licensing_information.
-->
<menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <contribute xsi:type="bundleItem" file="bundles/ConvSigmet.xml"
        menuText="Convective SIGMET" id="ConvSigmet">
    </contribute>
</menuTemplate>
@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Standard AWIPS II / Raytheon export-control license header, identical to the header in the file above -->
<menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml"
        menuText="SIGMET" id="SIGMETICING">
        <substitute key="hazardType" value="ICING" />
        <substitute key="color" value="#FFFFFF" />
        <substitute key="name" value="Icing SIGMET" />
    </contribute>
    <contribute xsi:type="bundleItem" file="bundles/Airmet.xml"
        menuText="AIRMET" id="AIRMETICING">
        <substitute key="hazardType" value="ICING" />
        <substitute key="color" value="#0000FF" />
        <substitute key="name" value="Icing AIRMET" />
    </contribute>
</menuTemplate>
@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Standard AWIPS II / Raytheon export-control license header, identical to the header in the first file above -->
<menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml"
        menuText="SIGMET" id="SIGMETTTOPCYCLONE">
        <substitute key="hazardType" value="TROPICAL CYCLONE" />
        <substitute key="color" value="#00FFFF" />
        <substitute key="name" value="Tropical Cyclone SIGMET" />
    </contribute>
</menuTemplate>
@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Standard AWIPS II / Raytheon export-control license header, identical to the header in the first file above -->
<menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml"
        menuText="SIGMET" id="SIGMETTURB">
        <substitute key="hazardType" value="TURBULENCE" />
        <substitute key="color" value="#FF4444" />
        <substitute key="name" value="Turbulence SIGMET" />
    </contribute>
    <contribute xsi:type="bundleItem" file="bundles/Airmet.xml"
        menuText="AIRMET" id="AIRMETTURB">
        <substitute key="hazardType" value="TURBULENCE" />
        <substitute key="color" value="#00FF00" />
        <substitute key="name" value="Turbulence AIRMET" />
    </contribute>
</menuTemplate>
@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Standard AWIPS II / Raytheon export-control license header, identical to the header in the first file above -->
<menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <contribute xsi:type="subMenu" menuText="AIRMET">
        <contribute xsi:type="bundleItem" file="bundles/Airmet.xml"
            menuText="IFR" id="AIRMETIFR">
            <substitute key="hazardType" value="INSTRUMENT FLIGHT RULES" />
            <substitute key="color" value="#00FFFF" />
            <substitute key="name" value="IFR AIRMET" />
        </contribute>
        <contribute xsi:type="bundleItem" file="bundles/Airmet.xml"
            menuText="Mtn Obscn" id="AIRMETMTNOBSC">
            <substitute key="hazardType" value="MOUNTAIN OBSCURATION" />
            <substitute key="color" value="#FF0088" />
            <substitute key="name" value="Mtn Obscn AIRMET" />
        </contribute>
    </contribute>
</menuTemplate>
@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Standard AWIPS II / Raytheon export-control license header, identical to the header in the first file above -->
<menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
    <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml"
        menuText="SIGMET" id="SIGMETVOLCANICASH">
        <substitute key="hazardType" value="VOLCANIC ASH CLOUD" />
        <substitute key="color" value="#00FF00" />
        <substitute key="name" value="Volcanic Ash SIGMET" />
    </contribute>
</menuTemplate>
@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!-- Standard AWIPS II / Raytheon export-control license header, identical to the header in the first file above -->
<menuContributionFile>
    <include installTo="menu:Aviation?after=ConvectionProductsStart"
        fileName="menus/aviationadvisory/baseAviationConvectionProducts.xml" />
    <include installTo="menu:Aviation?before=IcingProductsEnd"
        fileName="menus/aviationadvisory/baseAviationIcingProducts.xml" />
    <include installTo="menu:Aviation?before=TurbulenceProductsEnd"
        fileName="menus/aviationadvisory/baseAviationTurbulenceProducts.xml" />
    <include installTo="menu:Aviation?before=VisibilityProductsEnd"
        fileName="menus/aviationadvisory/baseAviationVisibilityProducts.xml" />
    <include installTo="menu:Aviation?before=TropicalCycloneEnd"
        fileName="menus/aviationadvisory/baseAviationTropicalCyclone.xml" />
    <include installTo="menu:Aviation?after=VolcanicAshStart"
        fileName="menus/aviationadvisory/baseAviationVolcanicAsh.xml" />
</menuContributionFile>
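Editor's note: the contribution file above ties the six new per-hazard menu fragments to named anchors (ConvectionProductsStart, IcingProductsEnd, and so on) that the reworked base Aviation menu defines further down. As a minimal sketch of how such a file can be inspected, assuming Python's standard ElementTree and a hypothetical local copy of the file:

    # Sketch only: print where each include installs; the path below is hypothetical.
    import xml.etree.ElementTree as ET

    root = ET.parse("menus/aviationadvisory/index.xml").getroot()
    for include in root.findall("include"):
        # installTo names the target menu plus an ?after=/?before= anchor id;
        # fileName is the fragment contributed at that anchor.
        print(include.get("installTo"), "->", include.get("fileName"))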
@@ -2,6 +2,5 @@
 <classpath>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
 	<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
-	<classpathentry kind="src" path="src"/>
 	<classpathentry kind="output" path="bin"/>
 </classpath>
@@ -2,15 +2,13 @@ Manifest-Version: 1.0
 Bundle-ManifestVersion: 2
 Bundle-Name: D2D Upper Air
 Bundle-SymbolicName: com.raytheon.uf.viz.d2d.ui.upperair;singleton:=true
-Bundle-Version: 1.0.0.qualifier
+Bundle-Version: 1.14.0.qualifier
-Bundle-Activator: com.raytheon.uf.viz.d2d.ui.upperair.Activator
 Bundle-Vendor: RAYTHEON
 Require-Bundle: org.eclipse.core.runtime,
 com.raytheon.uf.viz.d2d.ui;bundle-version="1.12.1174",
 com.raytheon.uf.viz.d2d.nsharp;bundle-version="1.0.0",
 com.raytheon.uf.viz.profiler;bundle-version="1.12.1174",
 com.raytheon.viz.pointdata;bundle-version="1.12.1174",
-com.raytheon.uf.viz.aviation.advisory;bundle-version="1.12.1174",
 com.raytheon.uf.viz.vaa;bundle-version="1.12.1174",
 com.raytheon.uf.viz.cwa;bundle-version="1.12.1174",
 com.raytheon.uf.viz.bufrsigwx;bundle-version="1.12.1174",
@@ -20,5 +18,5 @@ Require-Bundle: org.eclipse.core.runtime,
 com.raytheon.uf.viz.ncwf;bundle-version="1.12.1174",
 com.raytheon.viz.lpi;bundle-version="1.12.1174",
 com.raytheon.viz.spi;bundle-version="1.12.1174"
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-RequiredExecutionEnvironment: JavaSE-1.7
 Bundle-ActivationPolicy: lazy
@@ -1,5 +1,3 @@
-source.. = src/
-output.. = bin/
 bin.includes = META-INF/,\
                .,\
                localization/,\
@@ -19,86 +19,54 @@
     further_licensing_information.
 -->
 <menuTemplate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-    <contribute xsi:type="subMenu" menuText="Aviation">
-        <contribute xsi:type="titleItem" titleText="------ Convection Products ------" />
-        <contribute xsi:type="bundleItem" file="bundles/ConvSigmet.xml"
-            menuText="Convective SIGMET" id="ConvSigmet">
-        </contribute>
-        <contribute xsi:type="bundleItem" file="bundles/BufrNcwf.xml" menuText="NCWF" id="NCWF">
-        </contribute>
-        <contribute xsi:type="separator" id="separator1"/>
-
-        <contribute xsi:type="titleItem" titleText="------ Icing Products ------" />
-        <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml" menuText="SIGMET" id="SIGMETICING">
-            <substitute key="hazardType" value="ICING"/>
-            <substitute key="color" value="#FFFFFF"/>
-            <substitute key="name" value="Icing SIGMET"/>
-        </contribute>
-        <contribute xsi:type="bundleItem" file="bundles/Airmet.xml" menuText="AIRMET" id="AIRMETICING">
-            <substitute key="hazardType" value="ICING"/>
-            <substitute key="color" value="#0000FF"/>
-            <substitute key="name" value="Icing AIRMET"/>
-        </contribute>
-        <contribute xsi:type="separator" id="separator2"/>
-
-        <contribute xsi:type="titleItem" titleText="------ Turbulence Products ------" />
-        <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml" menuText="SIGMET" id="SIGMETTURB">
-            <substitute key="hazardType" value="TURBULENCE"/>
-            <substitute key="color" value="#FF4444"/>
-            <substitute key="name" value="Turbulence SIGMET"/>
-        </contribute>
-        <contribute xsi:type="bundleItem" file="bundles/Airmet.xml" menuText="AIRMET" id="AIRMETTURB">
-            <substitute key="hazardType" value="TURBULENCE"/>
-            <substitute key="color" value="#00FF00"/>
-            <substitute key="name" value="Turbulence AIRMET"/>
-        </contribute>
-        <contribute xsi:type="separator" id="separator3"/>
-
-        <contribute xsi:type="titleItem" titleText="------ Visibility Products ------" />
-        <contribute xsi:type="subMenu" menuText="AIRMET">
-            <contribute xsi:type="bundleItem" file="bundles/Airmet.xml" menuText="IFR" id="AIRMETIFR">
-                <substitute key="hazardType" value="INSTRUMENT FLIGHT RULES"/>
-                <substitute key="color" value="#00FFFF"/>
-                <substitute key="name" value="IFR AIRMET"/>
-            </contribute>
-            <contribute xsi:type="bundleItem" file="bundles/Airmet.xml" menuText="Mtn Obscn" id="AIRMETMTNOBSC">
-                <substitute key="hazardType" value="MOUNTAIN OBSCURATION"/>
-                <substitute key="color" value="#FF0088"/>
-                <substitute key="name" value="Mtn Obscn AIRMET"/>
-            </contribute>
-        </contribute>
-        <contribute xsi:type="separator" id="separator4"/>
-
-        <contribute xsi:type="titleItem" titleText="------ Tropical Cyclone ------" />
-        <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml" menuText="SIGMET" id="SIGMETTTOPCYCLONE">
-            <substitute key="hazardType" value="TROPICAL CYCLONE"/>
-            <substitute key="color" value="#00FFFF"/>
-            <substitute key="name" value="Tropical Cyclone SIGMET"/>
-        </contribute>
-        <contribute xsi:type="separator" id="separator5"/>
-
-        <contribute xsi:type="titleItem" titleText="------ Volcanic Ash ------" />
-        <contribute xsi:type="bundleItem" file="bundles/NonConvSigmet.xml" menuText="SIGMET" id="SIGMETVOLCANICASH">
-            <substitute key="hazardType" value="VOLCANIC ASH CLOUD"/>
-            <substitute key="color" value="#00FF00"/>
-            <substitute key="name" value="Volcanic Ash SIGMET"/>
-        </contribute>
-        <contribute xsi:type="bundleItem" file="bundles/VAA.xml" menuText="Volcanic Ash Advisories" id="VAA">
-        </contribute>
-        <contribute xsi:type="separator" id="separator6"/>
-
-        <contribute xsi:type="titleItem" titleText="------Center Weather ------" />
-        <contribute xsi:type="bundleItem" file="bundles/CWA.xml" menuText="Center Weather Advisories" id="cwa">
-        </contribute>
-        <contribute xsi:type="separator" id="separator7"/>
-
-        <contribute xsi:type="titleItem" titleText="------Significant Weather------" />
-        <contribute xsi:type="bundleItem" file="bundles/BufrSigWx.xml" menuText="Medium Level" id="SigWxMedium">
-            <substitute key="wxLayer" value="SWM"/>
-        </contribute>
-        <contribute xsi:type="bundleItem" file="bundles/BufrSigWx.xml" menuText="High Level" id="SigWxHigh">
-            <substitute key="wxLayer" value="SWH"/>
-        </contribute>
-    </contribute>
-    <contribute xsi:type="separator" id="separator8"/>
+    <contribute xsi:type="subMenu" id="Aviation" menuText="Aviation">
+        <contribute xsi:type="titleItem"
+            titleText="------ Convection Products ------" />
+        <contribute xsi:type="separator" id="ConvectionProductsStart"
+            visible="false" />
+        <contribute xsi:type="bundleItem" file="bundles/BufrNcwf.xml"
+            menuText="NCWF" id="NCWF">
+        </contribute>
+        <contribute xsi:type="separator" id="ConvectionProductsEnd" />
+
+        <contribute xsi:type="titleItem" titleText="------ Icing Products ------" />
+        <contribute xsi:type="separator" id="IcingProductsEnd" />
+
+        <contribute xsi:type="titleItem"
+            titleText="------ Turbulence Products ------" />
+        <contribute xsi:type="separator" id="TurbulenceProductsEnd" />
+
+        <contribute xsi:type="titleItem"
+            titleText="------ Visibility Products ------" />
+        <contribute xsi:type="separator" id="VisibilityProductsEnd" />
+
+        <contribute xsi:type="titleItem" titleText="------ Tropical Cyclone ------" />
+        <contribute xsi:type="separator" id="TropicalCycloneEnd" />
+
+        <contribute xsi:type="titleItem" titleText="------ Volcanic Ash ------" />
+        <contribute xsi:type="separator" id="VolcanicAshStart"
+            visible="false" />
+        <contribute xsi:type="bundleItem" file="bundles/VAA.xml"
+            menuText="Volcanic Ash Advisories" id="VAA">
+        </contribute>
+        <contribute xsi:type="separator" id="VolcanicAshEnd" />
+
+        <contribute xsi:type="titleItem" titleText="------Center Weather ------" />
+        <contribute xsi:type="bundleItem" file="bundles/CWA.xml"
+            menuText="Center Weather Advisories" id="cwa">
+        </contribute>
+        <contribute xsi:type="separator" id="CenterWeatherEnd" />
+
+        <contribute xsi:type="titleItem"
+            titleText="------Significant Weather------" />
+        <contribute xsi:type="bundleItem" file="bundles/BufrSigWx.xml"
+            menuText="Medium Level" id="SigWxMedium">
+            <substitute key="wxLayer" value="SWM" />
+        </contribute>
+        <contribute xsi:type="bundleItem" file="bundles/BufrSigWx.xml"
+            menuText="High Level" id="SigWxHigh">
+            <substitute key="wxLayer" value="SWH" />
+        </contribute>
+    </contribute>
+    <contribute xsi:type="separator" id="AviationMenuEnd" />
 </menuTemplate>
@@ -1,30 +0,0 @@
-package com.raytheon.uf.viz.d2d.ui.upperair;
-
-import org.osgi.framework.BundleActivator;
-import org.osgi.framework.BundleContext;
-
-public class Activator implements BundleActivator {
-
-    private static BundleContext context;
-
-    static BundleContext getContext() {
-        return context;
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see org.osgi.framework.BundleActivator#start(org.osgi.framework.BundleContext)
-     */
-    public void start(BundleContext bundleContext) throws Exception {
-        Activator.context = bundleContext;
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see org.osgi.framework.BundleActivator#stop(org.osgi.framework.BundleContext)
-     */
-    public void stop(BundleContext bundleContext) throws Exception {
-        Activator.context = null;
-    }
-
-}
@@ -21,6 +21,7 @@
       <import feature="com.raytheon.uf.viz.cots.feature" version="1.0.0.qualifier"/>
       <import feature="com.raytheon.uf.viz.common.core.feature" version="1.0.0.qualifier"/>
       <import feature="com.raytheon.uf.viz.core.feature" version="1.0.0.qualifier"/>
+      <import feature="com.raytheon.uf.viz.d2d.nsharp.feature" version="1.0.0.qualifier"/>
    </requires>

    <plugin
@@ -66,7 +66,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.util.FileUtil;
-import com.raytheon.uf.viz.spellchecker.Activator;
 import com.raytheon.uf.viz.spellchecker.jobs.SpellCheckJob;
 
 /**
@@ -79,6 +78,7 @@ import com.raytheon.uf.viz.spellchecker.jobs.SpellCheckJob;
  * 18 APR 2008  ###        lvenable    Initial creation
  * 01Mar2010    4765       MW Fegan    Moved from GFE plug-in.
  * 09/24/2014   #16693     lshi        filter out swear words in spelling check
+ * 10/23/2014   #3685      randerso    Changes to support mixed case
  *
  * </pre>
  *
@@ -87,9 +87,10 @@ import com.raytheon.uf.viz.spellchecker.jobs.SpellCheckJob;
  *
  */
 public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
     private static java.util.List<String> swearWords = Arrays.asList("ASSHOLE");
 
-    private static final transient IUFStatusHandler statusHandler = UFStatus.getHandler(SpellCheckDlg.class);
+    private static final transient IUFStatusHandler statusHandler = UFStatus
+            .getHandler(SpellCheckDlg.class);
 
     private static final Pattern DIGITS = Pattern.compile("\\d");
 
@@ -331,6 +332,7 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
      * org.eclipse.ui.texteditor.spelling.ISpellingProblemCollector#accept(org
      * .eclipse.ui.texteditor.spelling.SpellingProblem)
      */
+    @Override
     public void accept(SpellingProblem problem) {
         if (shell.isDisposed()) {
             return;
@@ -345,15 +347,16 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
         misspelledLbl.setText(badWord);
 
         ICompletionProposal[] proposals = problem.getProposals();
-        if (proposals != null && proposals.length > 0) {
+        if ((proposals != null) && (proposals.length > 0)) {
             for (ICompletionProposal proposal : proposals) {
                 String pdString = proposal.getDisplayString();
                 Matcher pdMatch = CHANGE_TO.matcher(pdString);
                 if (pdMatch.matches()) {
-                    String replString = pdMatch.group(1).toUpperCase();
+                    String replString = pdMatch.group(1);
                     // proposals may include case changes, which get lost
-                    //if (replString != badWord) {
-                    if (!swearWords.contains(replString) && !replString.equals(badWord)) {
+                    // if (replString != badWord) {
+                    if (!swearWords.contains(replString)
+                            && !replString.equals(badWord)) {
                         suggestionList.add(replString);
                     }
                 }
@@ -370,7 +373,7 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
 
         StyleRange styleRange = styledText.getStyleRangeAtOffset(problem
                 .getOffset());
-        if (styleRange == null || styleRange.isUnstyled()
+        if ((styleRange == null) || styleRange.isUnstyled()
                 || styleRange.similarTo(REDSTYLE)) {
             if (ignoreAll.contains(badWord)) {
                 scanForErrors();
@@ -407,6 +410,7 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
      * org.eclipse.ui.texteditor.spelling.ISpellingProblemCollector#beginCollecting
      * ()
      */
+    @Override
     public void beginCollecting() {
         // nothing at present
     }
@@ -531,7 +535,7 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
             probStart = matcher.start(2);
             // Only replace unstyled (unlocked) instances
             styleRange = styledText.getStyleRangeAtOffset(probStart);
-            if (styleRange == null || styleRange.isUnstyled()) {
+            if ((styleRange == null) || styleRange.isUnstyled()) {
                 repList.addFirst(Integer.valueOf(probStart));
             }
             found = matcher.find();
@@ -582,7 +586,8 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
         try {
             userDLFile.save();
         } catch (Exception e) {
-            statusHandler.handle(Priority.PROBLEM, "Error saving user dictionary", e);
+            statusHandler.handle(Priority.PROBLEM,
+                    "Error saving user dictionary", e);
         }
         // The spell check job might have a backlog of errors
         // for this word, which no longer apply.
@@ -658,6 +663,7 @@ public class SpellCheckDlg extends Dialog implements ISpellingProblemCollector {
      * org.eclipse.ui.texteditor.spelling.ISpellingProblemCollector#endCollecting
      * ()
      */
+    @Override
     public void endCollecting() {
         MessageDialog.openInformation(shell, "", "Done checking document");
         styledText.setSelectionRange(0, 0);
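Editor's note: the SpellCheckDlg change above stops forcing proposals to upper case and wraps the existing filter that drops the hard-coded swear word and identity replacements. A rough Python restatement of that filter, written only to make the accepted and rejected cases explicit (the function name and constant are mine, not part of the plugin):

    # Illustrative only: mirrors the suggestion filter in SpellCheckDlg.accept().
    SWEAR_WORDS = ["ASSHOLE"]  # the single entry the Java code hard-codes

    def keep_suggestion(replacement, bad_word):
        # Keep a proposal unless it is blacklisted or identical to the misspelling.
        return replacement not in SWEAR_WORDS and replacement != bad_word

    assert keep_suggestion("Word", "word")      # case-only corrections now survive
    assert not keep_suggestion("word", "word")  # identical replacement is dropped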
@@ -26,7 +26,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.eclipse.jface.preference.IPreferenceStore;
-import org.eclipse.ui.application.WorkbenchAdvisor;
 import org.osgi.framework.Bundle;
 
 import com.raytheon.uf.common.comm.HttpClient;
@@ -52,6 +51,7 @@ import com.raytheon.uf.viz.thinclient.localization.LocalizationCachePersistence;
 import com.raytheon.uf.viz.thinclient.localization.ThinClientLocalizationInitializer;
 import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants;
 import com.raytheon.uf.viz.thinclient.refresh.TimedRefresher;
+import com.raytheon.viz.ui.personalities.awips.AWIPSWorkbenchAdvisor;
 import com.raytheon.viz.ui.personalities.awips.AbstractAWIPSComponent;
 import com.raytheon.viz.ui.personalities.awips.CAVE;
 
@@ -178,12 +178,11 @@ public class ThinClientComponent extends CAVE implements IThinClientComponent {
     /*
      * (non-Javadoc)
      * 
-     * @see com.raytheon.viz.ui.personalities.awips.AbstractCAVEComponent#
-     * getWorkbenchAdvisor()
+     * @see com.raytheon.viz.ui.personalities.awips.AbstractAWIPSComponent#
+     * createAWIPSWorkbenchAdvisor()
      */
     @Override
-    protected WorkbenchAdvisor getWorkbenchAdvisor() {
-        // Use custom workbench advisor, will add thin client preferences page
+    protected AWIPSWorkbenchAdvisor createAWIPSWorkbenchAdvisor() {
         return new ThinClientWorkbenchAdvisor();
     }
 
@@ -111,8 +111,7 @@ import com.vividsolutions.jts.geom.LineString;
  * 08-21-2014   DR 15700   Qinglu Lin  handle the situation where frameTime is null in paintTrack().
  * 09-09-2014   RM #657    Qinglu Lin  handle StormTrackState.trackType is null.
  * 09-25-2014   ASM #16773 D. Friedman Fix NPE.
- *
->>>>>>> master_14.2.4
+ * 10-10-2014   ASM #16844 D. Friedman Prevent some errors when moving track.
  * </pre>
  *
  * @author mschenke
@@ -841,7 +840,9 @@ public class StormTrackDisplay implements IRenderable {
             PaintProperties paintProps) throws ImpossibleTrackException {
         int moveIndex = this.trackUtil.getCurrentFrame(paintProps
                 .getFramesInfo());
+        moveIndex = Math.min(moveIndex, state.timePoints.length - 1);
         int pivotIndex = state.displayedPivotIndex;
+        pivotIndex = Math.min(pivotIndex, state.timePoints.length - 1);
 
         double angle;
         double oppositeAngle;
@@ -0,0 +1,475 @@
##
# Standard AWIPS II / Raytheon export-control and licensing header, as in the files above, in # comment form.
##
# ----------------------------------------------------------------------------
# SVN: $Revision$ - $Date$
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# serpISC - version 1.7
#
# Changes an existing grid to blend better into neighboring ISC grids.
# Can be used as an initial or final step in coordination. Only your grids
# are affected: nothing happens to the ISC grids. The ISC button must have
# been clicked on at least once before using this tool.
#
# Every point on the outer perimeter of CWA (i.e, belonging to selected ISCs)
# takes part in a serp adjustment of the existing grid. If any ISC grids are
# missing or not selected on a CWA boundary, your own grid is used there instead.
#
# You can use this tool on one ISC at a time to see how each one would influence
# your grid. To fit all ISC boundaries at once you must have all of them clicked
# on. Running the tool sequentially on each ISC will retain previous results if
# you keep the older ones turned on, but different sequences will yield slightly
# different results.
#
# Make sure your grid does not have an artificial boundary near the CWA border.
# Otherwise, it might already match your ISC neighbor there, so the tool won't
# adjust anything and your artificial boundary will remain.
#
# You can include or exclude as many sample points within your CWA as you like, but
# sample points close to an ISC border can create unrealistic gradients.
#
# You can match a border only partway if you want. Suppose you want to meet your
# ISC neighbor half way. Then set the "percent of full match" to 50. After sending
# your ISC grid, your neighbor will want to match FULL way (not half) to meet the
# newly received grid. You can also use "percent of full match" to nudge your
# grid to your neighbors' grids.
#
# If your grid's duration spans several shorter-duration ISC grids, the ISC
# grids will be time-averaged first (except for PoP which always uses the
# maximum value) and the fit will be inexact. Or, if the ISC grids themselves
# don't match at a CWA boundary (something you can't do in your own grid), the
# the tool will converge intermediate contours to the point of the mismatch,
# and the fit will look artificial.
#
# For winds serp runs twice, once for u and once for v.
#
# This tool cannot be used with Wx grids.
#
# Authors: Les Colin - WFO Boise, ID, and Tim Barker - SOO Boise, ID
#
# 2003/06/21 - Revised "remoteness" calculation (to counteract observation-
#              clustering). New module is called getGoodRemoteness.
#              numpy-Python code: Barker. Algorithm: Colin.
# 2003/06/22 - Analyzes winds in u and v components, rather than by speed
#              and direction.
# 2003/06/23 - Finishes tool by copying ISC data outside CWA.
# 2003/10/29 - Runs serp without considering sample points, then runs it
#              again only on the samples. ISC-copy feature has been removed.
# 2004/05/30 - Uses improved serp analysis (see Barker). Can include or exclude
#              various ISC neighbors. Can include or exclude currently displayed
#              samples within your CWA. Samples in the ISC areas are ignored.
# 2004/07/09 - Modified to ignore duplicate sample points (previously, they
#              would hang the tool). Also modified tool to allow partial match
#              so that CWA grid adjusts only partway toward ISC grid.
# 2004/09/04 - Modified to work on an edit area, perhaps only half way across the
#              home CWA. The effect is a taper from a full (or partial) adjustment
#              at designated ISC borders to zero change inside the home CWA where
#              the edit area stops.
# 2004/09/21 - Now works even if preceded by ISC_Copy (by moving the home CWA-border
#              inward one pixel and comparing to nearest ISC neighbor values).
#              Tool completes by running an equivalent ISC_Copy on the selected ISC
#              borders. Tool now also contains a thinning feature to speed up
#              execution. e.g., thinning by 2 runs the tool on alternate border
#              points, thinning by 3 runs the tool on every third border point, etc.
# 2004/09/25 - Corrected bug in preceding version in which sample points could possibly
#              coincide with the revised home CWA-border points and hang the tool.
# 2004/11/10 - Final ISC_Copy feature made optional.
# 2004/11/17 - Corrected return statement at end of tool, and repaired code when
#              NOT adjusting for elevation.
# 2008/07/31 - added int() for arguments to createTimeRange for OB8.3. /TB
# 2012/07/13 - Version 1.7. AWIPS2 Port.
# ----------------------------------------------------------------------------


ToolType = "numeric"
WeatherElementEdited = "variableElement"
ScreenList=["SCALAR","VECTOR"]

#
#====================================================================
# Part to modify for local configuration
defaultCWA="STO"
VariableList=[
    ("Include these WFOs:",["MTR","EKA","HNX","REV","MFR"],"check",["MTR","EKA","HNX","REV","MFR"]),
    ("Intentional mismatch (CWA minus WFO):","0","alphaNumeric"),
    ("Currently displayed CWA sample points:","Use","radio",["Use","Don't use"]),
    ("Adjust for terrain elevation?","Yes","radio",["Yes","No"]),
    ("Elevation Factor",36,"numeric"),
    ("Tool thinning-factor:",1,"scale",[1,10],1),
    ("Percent of full match",100,"scale",[0,100],1),
    ("Copy ISC data in afterward?","No","radio",["Yes","No"]),
    ]

from numpy import *
import ObjAnal
import SmartScript
import time
from math import sin,cos,acos,pi

class Tool (SmartScript.SmartScript):
    def __init__(self, dbss):
        self._dbss=dbss
        SmartScript.SmartScript.__init__(self, dbss)
    def preProcessTool(self,varDict):
        self.OA = ObjAnal.ObjAnal(self._dbss)

    def execute(self, variableElement, variableElement_GridInfo, editArea, varDict, Topo, WEname, GridTimeRange):

        wxType = variableElement_GridInfo.getGridType().ordinal()
        defCWA=self.getEditArea(defaultCWA)
        defcwa=self.encodeEditArea(defCWA)
        nondefcwa=1-defcwa # i.e., toggle
        nondefCWA=self.decodeEditArea(nondefcwa)
        defea=self.taperGrid(nondefCWA,2)*2

        # The above line defines the default CWA area as defea==0, the outer perimeter of the default CWA
        # as defea==1, and further outside as defea==2.

        arbea=self.encodeEditArea(editArea)
        nonarbea=1-arbea
        nonarbEA=self.decodeEditArea(nonarbea)
        arbea=self.taperGrid(nonarbEA,2)*2

        cwa=zeros(Topo.shape)
        ISC=varDict["Include these WFOs:"]
        samps=varDict["Currently displayed CWA sample points:"]
        thin=varDict["Tool thinning-factor:"]
        partial=varDict["Percent of full match"]*.01

        for WFO in ISC:
            CWA=self.getEditArea(WFO)
            cwa=self.encodeEditArea(CWA)+cwa

        alltrs=self._getAllHourlyTimeRanges(GridTimeRange)
        if ((WEname=="MaxT")or(WEname=="PoP")):
            sum=zeros(Topo.shape)-150.0
        elif (WEname=="MinT"):
            sum=zeros(Topo.shape)+150.0
        else:
            if (wxType==2):
                sum=[zeros(Topo.shape),zeros(Topo.shape)]
            else:
                sum=zeros(Topo.shape)
        cnt=zeros(Topo.shape)

        for tr in alltrs:
            isc=self.getComposite(WEname,tr,0)
            if isc is None:

                continue
            #
            # Add to sums, or min/max
            #
            if wxType==1: # SCALAR
                bits,iscgrid=isc
                if ((WEname=="MaxT")or(WEname=="PoP")):
                    sum=where(bits,maximum(iscgrid,sum),sum)
                    cnt=where(bits,1,cnt)
                elif (WEname=="MinT"):
                    sum=where(bits,minimum(iscgrid,sum),sum)
                    cnt=where(bits,1,cnt)
                else:
                    sum=where(bits,sum+iscgrid,sum)
                    cnt=where(bits,cnt+1,cnt)
            if wxType==2: # VECTOR
                bits,mag,dir=isc
                (u,v)=self.MagDirToUV(mag,dir)
                sum[0]=where(bits,sum[0]+u,sum[0])
                sum[1]=where(bits,sum[1]+v,sum[1])
                cnt=where(bits,cnt+1,cnt)
        #
        # now calculate average/max/min, etc.
        # (count is always 1 for max/min)
        #
        if ((wxType==1)or(wxType==2)):
            if (wxType==2):
                (mag,dir)=variableElement
                (u,v)=self.MagDirToUV(mag,dir)
                sum[0]=where(equal(cnt,0),u,sum[0])
                sum[1]=where(equal(cnt,0),v,sum[1])
            else:
                sum=where(equal(cnt,0),variableElement,sum)
            cnt=where(equal(cnt,0),1,cnt)
        new=sum/cnt
        if (wxType==2):
            (mag,dir)=self.UVToMagDir(new[0],new[1])
            newvec=(mag,dir)

        self.elevadjust=0
        self.elevfactor=0.
        if varDict["Adjust for terrain elevation?"]=="Yes":
            self.elevadjust=1
            self.elevfactor=varDict["Elevation Factor"]
            if self.elevfactor<1:
                self.elevfactor=0.

        self.xloclist=[]
        self.yloclist=[]
        self.hloclist=[]
        self.zlist=[]
        self.ulist=[]
        self.vlist=[]

        for x in range(1,Topo.shape[1]-1):
            for y in range(1,Topo.shape[0]-1):
                if (x+y)%thin!=0:
                    continue
                if (arbea[y,x]<2 and defea[y,x]==0):
                    if (cwa[y,x+1]==1) or (cwa[y,x-1]==1) or (cwa[y+1,x]==1) or (cwa[y-1,x]==1):
                        if self.elevadjust==1:
                            self.hloclist.append(Topo[y,x])
                        else:
                            self.hloclist.append(0.)
                        self.xloclist.append(x)
                        self.yloclist.append(y)
                        if wxType==1:
                            chgval=0.
                            n=0
                            if cwa[y,x+1]==1:
                                if self.elevadjust==0:
                                    chgval=chgval+(new[y,x+1]-variableElement[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y,x+1])
                                    if elevdif<5000.:
                                        # ISC-CWA neighbors more than 5000 ft apart in elevation are too
                                        # dissimilar to compare.
                                        chgval=chgval+(new[y,x+1]-variableElement[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            if cwa[y,x-1]==1:
                                if self.elevadjust==0:
                                    chgval=chgval+(new[y,x-1]-variableElement[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y,x-1])
                                    if elevdif<5000.:
                                        chgval=chgval+(new[y,x-1]-variableElement[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            if cwa[y+1,x]==1:
                                if self.elevadjust==0:
                                    chgval=chgval+(new[y+1,x]-variableElement[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y+1,x])
                                    if elevdif<5000.:
                                        chgval=chgval+(new[y+1,x]-variableElement[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            if cwa[y-1,x]==1:
                                if self.elevadjust==0:
                                    chgval=chgval+(new[y-1,x]-variableElement[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y-1,x])
                                    if elevdif<5000.:
                                        chgval=chgval+(new[y-1,x]-variableElement[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            self.zlist.append((chgval/n)*partial)

                        elif wxType==2:
                            (magcwa,dircwa)=variableElement
                            (ucwa,vcwa)=self.MagDirToUV(magcwa,dircwa)
                            (uisc,visc)=self.MagDirToUV(mag,dir)
                            chgu=0.
                            chgv=0.
                            n=0
                            if cwa[y,x+1]==1:
                                if self.elevadjust==0:
                                    chgu=chgu+(uisc[y,x+1]-ucwa[y,x])
                                    chgv=chgv+(visc[y,x+1]-vcwa[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y,x+1])
                                    if elevdif<5000.:
                                        chgu=chgu+(uisc[y,x+1]-ucwa[y,x])*(1.0-elevdif/5000.)
                                        chgv=chgv+(visc[y,x+1]-vcwa[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            if cwa[y,x-1]==1:
                                if self.elevadjust==0:
                                    chgu=chgu+(uisc[y,x-1]-ucwa[y,x])
                                    chgv=chgv+(visc[y,x-1]-vcwa[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y,x-1])
                                    if elevdif<5000.:
                                        chgu=chgu+(uisc[y,x-1]-ucwa[y,x])*(1.0-elevdif/5000.)
                                        chgv=chgv+(visc[y,x-1]-vcwa[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            if cwa[y+1,x]==1:
                                if self.elevadjust==0:
                                    chgu=chgu+(uisc[y+1,x]-ucwa[y,x])
                                    chgv=chgv+(visc[y+1,x]-vcwa[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y+1,x])
                                    if elevdif<5000.:
                                        chgu=chgu+(uisc[y+1,x]-ucwa[y,x])*(1.0-elevdif/5000.)
                                        chgv=chgv+(visc[y+1,x]-vcwa[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            if cwa[y-1,x]==1:
                                if self.elevadjust==0:
                                    chgu=chgu+(uisc[y-1,x]-ucwa[y,x])
                                    chgv=chgv+(visc[y-1,x]-vcwa[y,x])
                                elif self.elevadjust==1:
                                    elevdif=abs(Topo[y,x]-Topo[y-1,x])
                                    if elevdif<5000.:
                                        chgu=chgu+(uisc[y-1,x]-ucwa[y,x])*(1.0-elevdif/5000.)
                                        chgv=chgv+(visc[y-1,x]-vcwa[y,x])*(1.0-elevdif/5000.)
                                n=n+1
                            self.ulist.append((chgu/n)*partial)
                            self.vlist.append((chgv/n)*partial)
                if arbea[y,x]==1 and defea[y,x]==0:
                    self.pointok=0
                    for nn in range(len(self.xloclist)):
                        if (y==self.yloclist[nn]) and (x==self.xloclist[nn]):
                            self.pointok=1
                    # In the above line an edit area IS on the screen and here we're looking for boundary points
                    # inside the home CWA that are more than one pixel from the border. We want to hold these
                    # points steady (i.e., zero change).
                    if self.pointok==1: # we already have this point, don't use it twice.
                        continue
                    self.xloclist.append(x)
                    self.yloclist.append(y)
                    if self.elevadjust==1:
                        self.hloclist.append(Topo[y,x])
                    else:
                        self.hloclist.append(0.)
                    if wxType==1:
                        self.zlist.append(0.)
                    if wxType==2:
                        self.ulist.append(0.)
                        self.vlist.append(0.)

        if samps=="Use":
            self.samplePoints = self.getSamplePoints(None)
            for sample in self.samplePoints:
                (x,y)=sample
                self.sampleok=0
                for count in range(len(self.xloclist)):
                    if ((x==self.xloclist[count]) and (y==self.yloclist[count])):
                        self.sampleok=1
                # self.sampleok becomes 1 for a duplicate entry, so bypass the duplicate.
                if self.sampleok==1:
                    continue
                if x<0 or x>Topo.shape[1]-1:
                    continue
                if y<0 or y>Topo.shape[0]-1:
                    continue
                if defea[y,x]!=0:
                    continue

                if self.elevadjust==1:
                    self.hloclist.append(Topo[y,x])
                else:
                    self.hloclist.append(0.)
                self.xloclist.append(x)
                self.yloclist.append(y)
                if wxType==1:
                    self.zlist.append(0.)
                if wxType==2:
                    self.ulist.append(0.)
                    self.vlist.append(0.)
        #
        # Don't proceed if no points
        #
        if len(self.xloclist)==0:
            self.statusBarMsg("No data available to serp to...","R")
            return variableElement
        else:
            print " the number of points being used:",len(self.xloclist)
        #
        #
        #
        if wxType==1: # scalar
            zval=self.OA.Serp(self.zlist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo)
            # zval is the new scalar-change grid.
            if varDict["Copy ISC data in afterward?"]=="Yes":
                znew=where(logical_or(equal(defea,0),equal(cwa,0)),variableElement+zval,new)
            else:
                znew=variableElement+zval

        if wxType==2: # vector
            zval=self.OA.Serp(self.ulist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo)
            # zval is the new u-change grid.
            if varDict["Copy ISC data in afterward?"]=="Yes":
                newu=where(logical_or(equal(defea,0),equal(cwa,0)),ucwa+zval,new[0])
            else:
                newu=ucwa+zval
            zval=self.OA.Serp(self.vlist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo)
            # this zval is the new v-change grid.
            if varDict["Copy ISC data in afterward?"]=="Yes":
                newv=where(logical_or(equal(defea,0),equal(cwa,0)),vcwa+zval,new[1])
            else:
                newv=vcwa+zval
            (newspd,newdir)=self.UVToMagDir(newu,newv)
            # newspd=where(equal(defea+cwa,0),newspd,mag)
            # newdir=where(equal(defea+cwa,0),newdir,dir)

            znew=(newspd,newdir)

        absmax=variableElement_GridInfo.getMaxValue()
        absmin=variableElement_GridInfo.getMinValue()

        if wxType==1:
            return clip(znew,absmin,absmax)
        else:
            return znew

    #===================================================================
    # _getAllHourlyTimeRanges - gets a list of all 1-hour time ranges
    # within the specified time range
    #
    def _getAllHourlyTimeRanges(self,tr):
        #
        # get integer time of UTC midnight today
        #
        secsinhour=60*60
        lt=time.gmtime()
        mid=time.mktime((lt[0],lt[1],lt[2],0,0,0,lt[6],lt[7],lt[8]))
        #
        # get integer time of input timerange start
        #
        start=tr.startTime()
        year=start.year
        month=start.month
        day=start.day
        hour=start.hour
        trs=time.mktime((year,month,day,hour,0,0,lt[6],lt[7],lt[8]))
        #
        # get integer time of input timerange end
        #
        end=tr.endTime()
        year=end.year
        month=end.month
        day=end.day
        hour=end.hour
        tre=time.mktime((year,month,day,hour,0,0,lt[6],lt[7],lt[8]))
        #
        # The difference between start/end determines number of hours
        #
        numhours=int((tre-trs)/secsinhour)
        #
        # Difference between mid/start determines starting offset
        #
        offset=int((trs-mid)/secsinhour)
        #
        # create each hourly time range from offset
        #
        alltrs=[]
        for hour in range(0,numhours):
            newtr=self.createTimeRange(int(offset+hour),int(offset+hour+1),"Zulu")
            alltrs.append(newtr)

        return alltrs
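Editor's note: the core of serpISC, per its own header comments, is a per-border-point change value: the ISC-minus-CWA difference averaged over the neighboring ISC points, optionally down-weighted by the elevation gap (a gap of 5000 ft or more contributes nothing) and scaled by the requested percent of full match. A stripped-down sketch of that calculation for a single scalar point, with invented names and without the tool's check that each neighbor actually lies in a selected ISC office's area; the resulting deltas are what the tool hands to ObjAnal.Serp to spread smoothly across the grid:

    # Sketch only: per-point border delta in the spirit of serpISC (scalar case).
    def border_delta(own, isc, topo, y, x, partial=1.0, elev_adjust=True):
        total, n = 0.0, 0
        for dy, dx in ((0, 1), (0, -1), (1, 0), (-1, 0)):
            ny, nx = y + dy, x + dx
            weight = 1.0
            if elev_adjust:
                elevdif = abs(topo[y, x] - topo[ny, nx])
                # neighbors 5000 ft or more apart in elevation are not compared
                weight = max(0.0, 1.0 - elevdif / 5000.0)
            total += (isc[ny, nx] - own[y, x]) * weight
            n += 1
        return (total / n) * partial if n else 0.0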
@ -33,6 +33,7 @@
|
||||||
# Date Ticket# Engineer Description
|
# Date Ticket# Engineer Description
|
||||||
# ------------ ---------- ----------- --------------------------
|
# ------------ ---------- ----------- --------------------------
|
||||||
# 02/12/2014 #2591 randerso Added retry when loading combinations fails
|
# 02/12/2014 #2591 randerso Added retry when loading combinations fails
|
||||||
|
# 10/20/2014 #3685 randerso Changed default of lowerCase to True if not specified
|
||||||
|
|
||||||
import string, getopt, sys, time, os, types, math
|
import string, getopt, sys, time, os, types, math
|
||||||
import ModuleAccessor
|
import ModuleAccessor
|
||||||
|
@ -191,7 +192,7 @@ class TextFormatter:
|
||||||
if language is not None:
|
if language is not None:
|
||||||
text = product.translateForecast(text, language)
|
text = product.translateForecast(text, language)
|
||||||
# Convert to Upper Case
|
# Convert to Upper Case
|
||||||
if not forecastDef.get('lowerCase', 0):
|
if not forecastDef.get('lowerCase', True):
|
||||||
text = text.upper()
|
text = text.upper()
|
||||||
else:
|
else:
|
||||||
text = "Text Product Type Invalid " + \
|
text = "Text Product Type Invalid " + \
|
||||||
|
|
@@ -27,6 +27,12 @@
 #
 # Author: hansen
 # ----------------------------------------------------------------------------
+#
+# SOFTWARE HISTORY
+#
+# Date Ticket# Engineer Description
+# ------------ ---------- ----------- --------------------------
+# 10/20/2014 #3685 randerso Changes to support mixed case products

 import EditAreaUtils
 import StringUtils
@@ -49,7 +55,7 @@ class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils):
                    cityDescriptor ="Including the cities of",
                    areaList=None, includeCities=1, includeZoneNames=1,
                    includeIssueTime=1, includeCodes=1, includeVTECString=1,
-                   hVTECString=None, accurateCities=False):
+                   hVTECString=None, accurateCities=False, upperCase=True):
         # Make a UGC area header for the given areaLabel
         # Determine list of areas (there could be more than one if we are using a combination)

@@ -227,7 +233,17 @@ class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils):
         if cityString != "":
             numCities = len(string.split(cityString, "...")[1:])
             if numCities == 1:
-                cityDescriptor = string.replace(cityDescriptor, "CITIES", "CITY")
+                def preserveCase(matchobj):
+                    orig = matchobj.group(0)
+                    repl = 'city'
+                    retv = ''
+                    for i in range(len(repl)):
+                        c = repl[i]
+                        if orig[i].isupper():
+                            c = c.upper()
+                        retv = retv + c
+                    return retv
+                cityDescriptor = re.sub("cities", preserveCase, cityDescriptor, flags=re.IGNORECASE)
             cityString = self.endline(cityDescriptor + cityString,
                                       linelength=self._lineLength, breakStr=["..."])
         issueTimeStr = issueTimeStr + "\n\n"
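The preserveCase helper added above swaps "cities" for "city" while copying the capitalization of whatever the regex actually matched, so "CITIES" becomes "CITY" and "Cities" becomes "City". A small standalone illustration of the same idea (not part of the commit):

# Hypothetical illustration of the case-preserving substitution used for cityDescriptor.
import re

def preserve_case(matchobj, repl="city"):
    orig = matchobj.group(0)
    # copy the capitalization of the matched text onto the replacement, letter by letter
    return "".join(c.upper() if o.isupper() else c for o, c in zip(orig, repl))

print(re.sub("cities", preserve_case, "INCLUDING THE CITIES OF", flags=re.IGNORECASE))
# -> "INCLUDING THE CITY OF"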
@@ -249,6 +265,8 @@ class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils):
         if includeVTECString == 0:
             VTECString = ""
         header = codeString + VTECString + nameString + cityString + issueTimeStr
+        if upperCase:
+            header = header.upper()
         return header

     # Make accurate city list based on the grids
@@ -569,8 +587,8 @@ class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils):
             if entry.has_key("fullStateName"):
                 state = entry["fullStateName"]
                 #Special District of Columbia case
-                if state == "DISTRICT OF COLUMBIA":
-                    state = "THE DISTRICT OF COLUMBIA"
+                if state.upper() == "DISTRICT OF COLUMBIA":
+                    state = "The District of Columbia"
             # Get part-of-state information
             partOfState = ""
             if entry.has_key("partOfState"):
@@ -583,15 +601,15 @@ class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils):
             if entry.has_key("ugcCode"):
                 codeType = entry["ugcCode"][2]
                 if codeType == "Z":
-                    nameType = "ZONE"
+                    nameType = "zone"
                 elif codeType == "C":
                     indCty=entry.get("independentCity", 0)
                     if indCty == 1:
-                        nameType = "INDEPENDENT CITY"
-                    elif state == "LOUISIANA":
-                        nameType = "PARISH"
+                        nameType = "independent city"
+                    elif state == "Louisiana":
+                        nameType = "parish"
                     else:
-                        nameType = "COUNTY"
+                        nameType = "county"
             else:
                 codeType == "?"
             value = (state, partOfState)
@@ -95,6 +95,7 @@ import com.raytheon.uf.common.activetable.VTECChange;
 import com.raytheon.uf.common.activetable.VTECTableChangeNotification;
 import com.raytheon.uf.common.dataplugin.gfe.textproduct.DraftProduct;
 import com.raytheon.uf.common.dataplugin.gfe.textproduct.ProductDefinition;
+import com.raytheon.uf.common.dataplugin.text.db.MixedCaseProductSupport;
 import com.raytheon.uf.common.jms.notification.INotificationObserver;
 import com.raytheon.uf.common.jms.notification.NotificationException;
 import com.raytheon.uf.common.jms.notification.NotificationMessage;
@@ -158,6 +159,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
 * 02/05/2014 17022 ryu Modified loadDraft() to fix merging of WMO heading and AWIPS ID.
 * 03/25/2014 #2884 randerso Added xxxid to check for disabling editor
 * 05/12/2014 16195 zhao Modified widgetSelected() for "Auto Wrap" option widget
+* 10/20/2014 #3685 randerso Made conversion to upper case conditional on product id
 *
 * </pre>
 *
@@ -480,7 +482,7 @@ public class ProductEditorComp extends Composite implements
             break;
         case SWT.Show:
             if ((!dead)
-                    && (getProductText() != null || !getProductText()
+                    && ((getProductText() != null) || !getProductText()
                             .isEmpty())) {
                 timeUpdater.schedule();
             }
@@ -707,7 +709,7 @@ public class ProductEditorComp extends Composite implements
         Rectangle trim = p.computeTrim(0, 0, 0, 0);
         Point dpi = p.getDPI();
         int leftMargin = dpi.x + trim.x;
-        int topMargin = dpi.y / 2 + trim.y;
+        int topMargin = (dpi.y / 2) + trim.y;
         GC gc = new GC(p);
         Font font = gc.getFont();
         String printText = textComp.getProductText();
@@ -866,8 +868,8 @@ public class ProductEditorComp extends Composite implements
         autoWrapMI.addSelectionListener(new SelectionAdapter() {
             @Override
             public void widgetSelected(SelectionEvent e) {
                 wrapMode = !wrapMode;
                 textComp.setAutoWrapMode(wrapMode);
             }
         });

@@ -1159,7 +1161,9 @@ public class ProductEditorComp extends Composite implements
             textComp.getTextEditorST().setText(
                     textComp.getTextEditorST().getText() + "\n");
         }
-        textComp.upper();
+        if (!MixedCaseProductSupport.isMixedCase(getNNNid())) {
+            textComp.upper();
+        }
         textComp.endUpdate();

         if (!frameCheck(false)) {
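The hunks above route the editor's "convert to upper case" step through MixedCaseProductSupport keyed on the product's NNN id, so only products that are not enabled for mixed case get upcased. A minimal sketch of that idea (the product set below is an assumption for illustration, not the real site configuration):

# Hypothetical illustration of conditional upper-casing by product id.
MIXED_CASE_PILS = {"AFD", "PNS", "RER"}   # assumption, not the real list

def conditional_to_upper(nnn_id, text):
    # keep mixed case only for products that are allowed to use it
    if nnn_id in MIXED_CASE_PILS:
        return text
    return text.upper()

# usage: conditional_to_upper("ZFP", "Partly cloudy") -> "PARTLY CLOUDY"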
@@ -1292,7 +1296,7 @@ public class ProductEditorComp extends Composite implements
                     "Error sending active table request to http server ", e);
         }

-        if (records == null || records.isEmpty()) {
+        if ((records == null) || records.isEmpty()) {
             activeVtecRecords = null;
         } else {
             if (pil != null) {
@@ -1391,7 +1395,7 @@ public class ProductEditorComp extends Composite implements
                 activeRecs = getMatchingActiveVTEC(zones, oid, phen, sig,
                         etn);
                 String eventStr = "." + phen + "." + sig + "." + etn;
-                if (activeRecs == null || activeRecs.isEmpty()) {
+                if ((activeRecs == null) || activeRecs.isEmpty()) {
                     statusHandler.handle(Priority.PROBLEM,
                             "No active records found for " + vtec);
                 } else {
@@ -1406,7 +1410,7 @@ public class ProductEditorComp extends Composite implements

                     // segment invalid due to the event going into
                     // effect in part of the segment area
-                    if (started > 0 && started < activeRecs.size()) {
+                    if ((started > 0) && (started < activeRecs.size())) {
                         final String msg = "Event "
                                 + eventStr
                                 + " has gone into effect in part"
@@ -1508,8 +1512,8 @@ public class ProductEditorComp extends Composite implements
             }

             // Check start and ending time for end later than start
-            if (vtecStart != null && vtecEnd != null
-                    && vtecStart.getTime() >= vtecEnd.getTime()) {
+            if ((vtecStart != null) && (vtecEnd != null)
+                    && (vtecStart.getTime() >= vtecEnd.getTime())) {
                 setTabColorFunc(productStateEnum.New);
                 String msg = "VTEC ending time is before "
                         + "starting time. Product is invalid and must"
@@ -1520,13 +1524,13 @@ public class ProductEditorComp extends Composite implements
             // Give 30 minutes of slack to a couple of action codes
             // check the ending time and transmission time
             if ((action.equals("EXP") || action.equals("CAN"))
-                    && vtecEnd != null) {
-                vtecEnd.setTime(vtecEnd.getTime() + 30
-                        * TimeUtil.MILLIS_PER_MINUTE);
+                    && (vtecEnd != null)) {
+                vtecEnd.setTime(vtecEnd.getTime()
+                        + (30 * TimeUtil.MILLIS_PER_MINUTE));
             }

-            if (vtecEnd != null
-                    && vtecEnd.getTime() <= transmissionTime.getTime()) {
+            if ((vtecEnd != null)
+                    && (vtecEnd.getTime() <= transmissionTime.getTime())) {
                 setTabColorFunc(productStateEnum.New);
                 String msg = "VTEC ends before current time."
                         + " Product is invalid and must be regenerated.";
@@ -1596,7 +1600,7 @@ public class ProductEditorComp extends Composite implements

         // time contains, if time range (tr) contains time (t), return 1 def
         public boolean contains(Date t) {
-            return t.getTime() >= startTime.getTime() && t.before(endTime);
+            return (t.getTime() >= startTime.getTime()) && t.before(endTime);
         }

         public Date getStartTime() {
@@ -1650,7 +1654,7 @@ public class ProductEditorComp extends Composite implements
                 zones = decodeUGCs(segData);
                 vtecs = getVTEClines(segData);
                 newVtecs = fixVTEC(zones, vtecs, transmissionTime);
-                if (newVtecs != null && !newVtecs.isEmpty()) {
+                if ((newVtecs != null) && !newVtecs.isEmpty()) {
                     textComp.replaceText(tipVtec, newVtecs);
                 }
             } catch (VizException e) {
@@ -1761,7 +1765,9 @@ public class ProductEditorComp extends Composite implements

         textComp.startUpdate();
         try {
-            textComp.upper();
+            if (!MixedCaseProductSupport.isMixedCase(getNNNid())) {
+                textComp.upper();
+            }
             status1 = frameCheck(true);
             boolean status2 = changeTimes();
             if (status1 && status2) {
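The @@ -1520 hunk above keeps an existing rule while reformatting it: EXP and CAN products get 30 minutes of grace before the editor declares the VTEC end time expired relative to the transmission time. A standalone sketch of that check (times as epoch seconds; names are illustrative only):

# Hypothetical illustration of the 30-minute slack rule for EXP/CAN actions.
SLACK_SECONDS = 30 * 60

def vtec_end_is_valid(action, vtec_end, transmission_time):
    if vtec_end is None:
        return True
    if action in ("EXP", "CAN"):
        vtec_end = vtec_end + SLACK_SECONDS   # allow 30 minutes of grace
    return vtec_end > transmission_time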
@@ -1935,7 +1941,8 @@ public class ProductEditorComp extends Composite implements
         int sel = hoursSpnr.getSelection();
         int hours = sel / 100;
         int minuteInc = (sel % 100) / 25;
-        int purgeOffset = hours * TimeUtil.MINUTES_PER_HOUR + minuteInc * 15; // minutes
+        int purgeOffset = (hours * TimeUtil.MINUTES_PER_HOUR)
+                + (minuteInc * 15); // minutes

         Date now = SimulatedTime.getSystemTime().getTime();
         Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
@@ -1943,7 +1950,7 @@ public class ProductEditorComp extends Composite implements
         cal.add(Calendar.MINUTE, purgeOffset);
         int min = cal.get(Calendar.MINUTE);
         if ((min % 15) >= 1) {
-            cal.set(Calendar.MINUTE, (min / 15 + 1) * 15);
+            cal.set(Calendar.MINUTE, ((min / 15) + 1) * 15);
             cal.set(Calendar.SECOND, 0);
         }
         this.expireDate = cal.getTime();
@@ -2130,7 +2137,7 @@ public class ProductEditorComp extends Composite implements
         long delta = expireTimeSec % roundSec;
         long baseTime = (expireTimeSec / roundSec) * roundSec
                 * TimeUtil.MILLIS_PER_SECOND;
-        if (delta / TimeUtil.SECONDS_PER_MINUTE >= 1) {
+        if ((delta / TimeUtil.SECONDS_PER_MINUTE) >= 1) {
             expireTime.setTime(baseTime
                     + (roundSec * TimeUtil.MILLIS_PER_SECOND));
         } else { // within 1 minute, don't add next increment
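The purge-offset logic above converts the spinner value (hours in the hundreds digits, quarter-hours in 25-unit steps below that) to minutes and then rounds the expiration up to the next 15-minute boundary. A small standalone sketch of the same arithmetic (pure Python, no SWT/Calendar; names are illustrative):

# Hypothetical illustration of the purge-time rounding used above.
def purge_offset_minutes(spinner_value):
    hours = spinner_value // 100
    minute_inc = (spinner_value % 100) // 25      # 0..3 -> 0/15/30/45 minutes
    return hours * 60 + minute_inc * 15

def round_up_to_quarter_hour(minute_of_hour):
    if minute_of_hour % 15 >= 1:
        return ((minute_of_hour // 15) + 1) * 15  # next 15-minute boundary
    return minute_of_hour

# e.g. purge_offset_minutes(325) -> 195 minutes; round_up_to_quarter_hour(52) -> 60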
@ -2288,7 +2295,8 @@ public class ProductEditorComp extends Composite implements
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
MessageBox mb = new MessageBox(parent.getShell(), SWT.OK
|
MessageBox mb = new MessageBox(parent.getShell(), SWT.OK
|
||||||
| SWT.ICON_WARNING);
|
| SWT.ICON_WARNING);
|
||||||
mb.setText("Formatter AutoWrite failed: " + this.pil);
|
mb.setText("Error");
|
||||||
|
mb.setMessage("Formatter AutoWrite failed: " + this.pil);
|
||||||
mb.open();
|
mb.open();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2422,7 +2430,7 @@ public class ProductEditorComp extends Composite implements
|
||||||
|
|
||||||
private void displayCallToActionsDialog(int callToActionType) {
|
private void displayCallToActionsDialog(int callToActionType) {
|
||||||
// Allow only one of the 3 types of dialogs to be displayed.
|
// Allow only one of the 3 types of dialogs to be displayed.
|
||||||
if (ctaDialog != null && ctaDialog.getShell() != null
|
if ((ctaDialog != null) && (ctaDialog.getShell() != null)
|
||||||
&& !ctaDialog.isDisposed()) {
|
&& !ctaDialog.isDisposed()) {
|
||||||
ctaDialog.bringToTop();
|
ctaDialog.bringToTop();
|
||||||
return;
|
return;
|
||||||
|
@ -2673,6 +2681,10 @@ public class ProductEditorComp extends Composite implements
|
||||||
return productId;
|
return productId;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public String getNNNid() {
|
||||||
|
return textdbPil.substring(3, 6);
|
||||||
|
}
|
||||||
|
|
||||||
public String getProductName() {
|
public String getProductName() {
|
||||||
return productName;
|
return productName;
|
||||||
}
|
}
|
||||||
|
@ -2901,24 +2913,24 @@ public class ProductEditorComp extends Composite implements
|
||||||
|
|
||||||
// Word-wrap the whole selection.
|
// Word-wrap the whole selection.
|
||||||
int curLine = styledText.getLineAtOffset(selectionRange.x);
|
int curLine = styledText.getLineAtOffset(selectionRange.x);
|
||||||
int lastSelIdx = selectionRange.x + selectionRange.y - 1;
|
int lastSelIdx = (selectionRange.x + selectionRange.y) - 1;
|
||||||
int lastLine = styledText.getLineAtOffset(lastSelIdx);
|
int lastLine = styledText.getLineAtOffset(lastSelIdx);
|
||||||
int[] indices = null;
|
int[] indices = null;
|
||||||
while (curLine <= lastLine && curLine < styledText.getLineCount()) {
|
while ((curLine <= lastLine) && (curLine < styledText.getLineCount())) {
|
||||||
int lineOff = styledText.getOffsetAtLine(curLine);
|
int lineOff = styledText.getOffsetAtLine(curLine);
|
||||||
// word wrap a block, and find out how the text length changed.
|
// word wrap a block, and find out how the text length changed.
|
||||||
indices = textComp.wordWrap(styledText, lineOff, wrapColumn);
|
indices = textComp.wordWrap(styledText, lineOff, wrapColumn);
|
||||||
int firstIdx = indices[0];
|
int firstIdx = indices[0];
|
||||||
int lastIdx = indices[1];
|
int lastIdx = indices[1];
|
||||||
int newLen = indices[2];
|
int newLen = indices[2];
|
||||||
int oldLen = 1 + lastIdx - firstIdx;
|
int oldLen = (1 + lastIdx) - firstIdx;
|
||||||
int diff = newLen - oldLen;
|
int diff = newLen - oldLen;
|
||||||
// adjust our endpoint for the change in length
|
// adjust our endpoint for the change in length
|
||||||
lastSelIdx += diff;
|
lastSelIdx += diff;
|
||||||
lastLine = styledText.getLineAtOffset(lastSelIdx);
|
lastLine = styledText.getLineAtOffset(lastSelIdx);
|
||||||
// newLen doesn't include \n, so it can be 0. Don't allow
|
// newLen doesn't include \n, so it can be 0. Don't allow
|
||||||
// firstIdx+newLen-1 to be < firstIdx, or loop becomes infinite.
|
// firstIdx+newLen-1 to be < firstIdx, or loop becomes infinite.
|
||||||
int lastWrapIdx = Math.max(firstIdx, firstIdx + newLen - 1);
|
int lastWrapIdx = Math.max(firstIdx, (firstIdx + newLen) - 1);
|
||||||
// move down to the next unwrapped line
|
// move down to the next unwrapped line
|
||||||
curLine = styledText.getLineAtOffset(lastWrapIdx) + 1;
|
curLine = styledText.getLineAtOffset(lastWrapIdx) + 1;
|
||||||
}
|
}
|
||||||
|
@ -2979,7 +2991,7 @@ public class ProductEditorComp extends Composite implements
|
||||||
String str = null;
|
String str = null;
|
||||||
|
|
||||||
Object obj = productDefinition.get(key);
|
Object obj = productDefinition.get(key);
|
||||||
if (obj != null && obj instanceof Collection) {
|
if ((obj != null) && (obj instanceof Collection)) {
|
||||||
Collection<?> collection = (Collection<?>) obj;
|
Collection<?> collection = (Collection<?>) obj;
|
||||||
str = (String) (collection.toArray())[0];
|
str = (String) (collection.toArray())[0];
|
||||||
} else {
|
} else {
|
||||||
|
|
@@ -22,8 +22,6 @@ package com.raytheon.viz.grid.rsc.general;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Executors;

 import com.raytheon.uf.common.dataplugin.PluginDataObject;
 import com.raytheon.uf.common.status.IUFStatusHandler;
@@ -31,6 +29,7 @@ import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.time.DataTime;
 import com.raytheon.uf.viz.core.exception.VizException;
+import com.raytheon.uf.viz.core.jobs.JobPool;

 /**
 *
@@ -48,6 +47,8 @@ import com.raytheon.uf.viz.core.exception.VizException;
 * Jun 24, 2013 2140 randerso Moved safe name code into AbstractVizResource
 * Oct 07, 2014 3668 bclement uses executor instead of eclipse job
 *                            renamed to GridDataRequestRunner
+* Oct 23, 2014 3668 bsteffen replace executor with job pool so user
+*                            sees progress.
 *
 * </pre>
 *
@@ -59,8 +60,8 @@ class GridDataRequestRunner implements Runnable {
     private static final int POOL_SIZE = Integer.getInteger(
             "grid.request.pool.size", 10);

-    private static final Executor executor = Executors
-            .newFixedThreadPool(POOL_SIZE);
+    private static final JobPool jobPool = new JobPool("Requesting Grid Data",
+            POOL_SIZE);

     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(GridDataRequestRunner.class);
@@ -95,7 +96,15 @@ class GridDataRequestRunner implements Runnable {

     }

-    private volatile boolean cancelled = false;
+    /**
+     * This class is not designed to handle multiple requests concurrently. To
+     * ensure this doesn't happen we track when it is scheduled and do not
+     * schedule again. It would have been simpler to synchronize the run method
+     * but that ties up threads from the pool that other resources should use.
+     * So we don't leave dangling requests this should only be modified while
+     * synchronized on requests.
+     */
+    private volatile boolean scheduled = false;

     private AbstractGridResource<?> resource;

@@ -111,8 +120,10 @@ class GridDataRequestRunner implements Runnable {
             try {
                 request.gridData = resource.getData(request.time, request.pdos);
                 if (request.gridData == null) {
-                    // need to remove unfulfillable requests to avoid infinite
-                    // loop.
+                    /*
+                     * need to remove unfulfillable requests to avoid infinite
+                     * loop.
+                     */
                     synchronized (requests) {
                         requests.remove(request);
                     }
@@ -123,9 +134,6 @@ class GridDataRequestRunner implements Runnable {
                 request.exception = e;
                 resource.issueRefresh();
             }
-            if (cancelled) {
-                break;
-            }
         }
     }

@@ -141,6 +149,7 @@ class GridDataRequestRunner implements Runnable {
                     return request;
                 }
             }
+            scheduled = false;
         }
         return null;
     }
@@ -172,21 +181,14 @@ class GridDataRequestRunner implements Runnable {
             if ((request.exception != null) && !request.exceptionHandled) {
                 handleExceptions();
             }
-        }
-        if (request.shouldRequest()) {
-            this.schedule();
+            if (!scheduled && request.shouldRequest()) {
+                scheduled = true;
+                jobPool.schedule(this);
+            }
         }
         return null;
     }

-    /**
-     * send current requests
-     */
-    private void schedule() {
-        cancelled = false;
-        executor.execute(this);
-    }
-
     private void handleExceptions() {

         List<GridDataRequest> failedRequests = new ArrayList<GridDataRequest>(
@@ -266,15 +268,10 @@ class GridDataRequestRunner implements Runnable {
     public void stopAndClear() {
         synchronized (requests) {
             requests.clear();
+            if (jobPool.cancel(this)) {
+                scheduled = false;
+            }
         }
-        this.cancel();
-    }
-
-    /**
-     * cancel current request
-     */
-    private void cancel() {
-        cancelled = true;
     }

 }
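The new javadoc above explains the scheduling guard: instead of synchronizing run() (which would pin a pool thread), the runner marks itself scheduled and only resubmits to the pool when it is not already queued. A minimal sketch of that pattern, with threading.Lock standing in for the synchronized block (names are illustrative only):

# Hypothetical illustration of the "schedule at most once" guard described above.
import threading

class RequestRunner:
    def __init__(self, pool):
        self.pool = pool                  # anything with a schedule(runnable) method
        self.requests = []
        self.lock = threading.Lock()      # plays the role of synchronized(requests)
        self.scheduled = False

    def add_request(self, request):
        with self.lock:
            self.requests.append(request)
            if not self.scheduled:        # do not queue a second copy of ourselves
                self.scheduled = True
                self.pool.schedule(self)

    def run(self):
        while True:
            with self.lock:
                if not self.requests:
                    self.scheduled = False   # nothing left; allow rescheduling later
                    return
                request = self.requests.pop(0)
            request.process()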
@@ -29,7 +29,7 @@
 -Dthrift.stream.maxsize=200
 -Dviz.memory.warn.threshold=98
 -XX:+UnlockExperimentalVMOptions
--XX:G1HeapRegionSize=1
+-XX:G1HeapRegionSize=4M
 -XX:InitiatingHeapOccupancyPercent=25
 -XX:G1MixedGCCountTarget=16
 -XX:G1MixedGCLiveThresholdPercent=25
@@ -28,7 +28,7 @@
 -Dviz.memory.warn.threshold=99
 -XX:MaxDirectMemorySize=1G
 -XX:+UnlockExperimentalVMOptions
--XX:G1HeapRegionSize=1
+-XX:G1HeapRegionSize=4M
 -XX:InitiatingHeapOccupancyPercent=25
 -XX:G1MixedGCCountTarget=16
 -XX:G1MixedGCLiveThresholdPercent=25
@@ -21,8 +21,11 @@ package com.raytheon.viz.satellite.rsc;

 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;

 import javax.measure.Measure;

@@ -50,10 +53,16 @@ import com.raytheon.uf.viz.core.rsc.LoadProperties;
 import com.raytheon.uf.viz.core.rsc.ResourceList;
 import com.raytheon.uf.viz.core.rsc.capabilities.ColorMapCapability;
 import com.raytheon.uf.viz.core.rsc.capabilities.ImagingCapability;
+import com.raytheon.uf.viz.core.rsc.interrogation.Interrogatable;
+import com.raytheon.uf.viz.core.rsc.interrogation.InterrogateMap;
+import com.raytheon.uf.viz.core.rsc.interrogation.InterrogationKey;
+import com.raytheon.uf.viz.core.rsc.interrogation.Interrogator;
 import com.vividsolutions.jts.geom.Coordinate;

 /**
- * TODO Add Description
+ * Displays multiple satellite resources in a single resource. Uses graphics
+ * mosaicing to combine images so that alhpa blending correctly treats multiple
+ * images as a single layer when applying the alpha.
 *
 * <pre>
 * SOFTWARE HISTORY
@@ -67,6 +76,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 *                          values and returns NaN now
 * Nov 18, 2013 2544 bsteffen Override recycleInternal
 * Nov 20, 2013 2492 bsteffen Update inspect to use Measure objects
+* Oct 27, 2014 3681 bsteffen Implement Interrogatable
 *
 * </pre>
 *
@@ -76,7 +86,7 @@ import com.vividsolutions.jts.geom.Coordinate;

 public class SatBlendedResource extends
         AbstractVizResource<SatBlendedResourceData, MapDescriptor> implements
-        IResourceGroup, IRefreshListener, IResourceDataChanged {
+        IResourceGroup, IRefreshListener, IResourceDataChanged, Interrogatable {

     private IMosaicImage mosaicImage = null;

@@ -323,4 +333,40 @@ public class SatBlendedResource extends
     public void resourceChanged(ChangeType type, Object object) {
         refresh();
     }
+
+    @Override
+    public Set<InterrogationKey<?>> getInterrogationKeys() {
+        Set<InterrogationKey<?>> set = new HashSet<>();
+        List<Interrogatable> resourceList = getResourceList()
+                .getResourcesByTypeAsType(Interrogatable.class);
+        for (Interrogatable resource : resourceList) {
+            set.addAll(resource.getInterrogationKeys());
+        }
+        return set;
+    }
+
+    @Override
+    public InterrogateMap interrogate(ReferencedCoordinate coordinate,
+            DataTime time, InterrogationKey<?>... keys) {
+        if (!Arrays.asList(keys).contains(Interrogator.VALUE)) {
+            keys = Arrays.copyOf(keys, keys.length + 1);
+            keys[keys.length - 1] = Interrogator.VALUE;
+        }
+        List<Interrogatable> list = getResourceList().getResourcesByTypeAsType(
+                Interrogatable.class);
+        Collections.reverse(list);
+        for (Interrogatable resource : list) {
+            InterrogateMap result = resource.interrogate(
+                    coordinate, time, keys);
+            Measure<? extends Number, ?> value = result.get(Interrogator.VALUE);
+            if (value != null) {
+                double quantity = value.getValue().doubleValue();
+                if (!Double.isNaN(quantity)) {
+                    return result;
+                }
+            }
+        }
+        return new InterrogateMap();
+    }
 }
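The interrogate() implementation added above samples the blended layers from the top of the resource list down and returns the first result whose value is not NaN. A standalone sketch of that lookup order (plain Python; names are illustrative only):

# Hypothetical illustration of "query topmost layer first, fall through on NaN".
import math

def interrogate_blended(layers, coordinate):
    # layers are ordered bottom-to-top, so walk them in reverse
    for layer in reversed(layers):
        result = layer.interrogate(coordinate)
        value = result.get("value")
        if value is not None and not math.isnan(value):
            return result
    return {}          # nothing valid at this point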
@@ -115,6 +115,7 @@ import org.eclipse.ui.menus.IMenuService;
 import com.raytheon.uf.common.activetable.SendPracticeProductRequest;
 import com.raytheon.uf.common.dataplugin.text.RemoteRetrievalResponse;
 import com.raytheon.uf.common.dataplugin.text.alarms.AlarmAlertProduct;
+import com.raytheon.uf.common.dataplugin.text.db.MixedCaseProductSupport;
 import com.raytheon.uf.common.dataplugin.text.db.OperationalStdTextProduct;
 import com.raytheon.uf.common.dataplugin.text.db.PracticeStdTextProduct;
 import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct;
@@ -341,6 +342,7 @@ import com.raytheon.viz.ui.dialogs.SWTMessageBox;
 * 13May2014 2536 bclement moved WMO Header to common, switched from TimeTools to TimeUtil
 * 11Sep2014 3580 mapeters Replaced SerializationTuil usage with JAXBManager,
 *                         removed IQueryTransport usage (no longer exists).
+* 20Oct2014 3685 randerso Made conversion to upper case conditional on product id
 *
 * </pre>
 *
@@ -1096,10 +1098,9 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
     * Search and replace dialog.
     */
    private SearchReplaceDlg searchReplaceDlg;

    /**
-    * Flag indicating if the overwrite mode has been set for
-    * template editing.
+    * Flag indicating if the overwrite mode has been set for template editing.
     */
    private boolean isTemplateOverwriteModeSet = false;

@@ -2065,7 +2066,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        overStrikeItem.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent event) {
                if (!AFOSParser.isTemplate) {
                    if (overwriteMode == true) {
                        overwriteMode = false;
                        editorInsertCmb.select(INSERT_TEXT);
@@ -3714,14 +3715,14 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        editorInsertCmb.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent event) {
                if (!AFOSParser.isTemplate) {
-                    if (editorInsertCmb.getSelectionIndex() == INSERT_TEXT
-                            && overwriteMode == true) {
+                    if ((editorInsertCmb.getSelectionIndex() == INSERT_TEXT)
+                            && (overwriteMode == true)) {
                        textEditor.invokeAction(ST.TOGGLE_OVERWRITE);
                        overwriteMode = false;
                        overStrikeItem.setSelection(false);
-                    } else if (editorInsertCmb.getSelectionIndex() == OVERWRITE_TEXT
-                            && overwriteMode == false) {
+                    } else if ((editorInsertCmb.getSelectionIndex() == OVERWRITE_TEXT)
+                            && (overwriteMode == false)) {
                        textEditor.invokeAction(ST.TOGGLE_OVERWRITE);
                        overwriteMode = true;
                        overStrikeItem.setSelection(true);
@@ -3887,7 +3888,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
                    event.doit = false; // Ignore Ctrl+Shift+PageDown
                } else if (event.keyCode == SWT.INSERT) {
                    // Ins key on the keypad
                    if (AFOSParser.isTemplate) {
                        if (overwriteMode == true) {
                            overwriteMode = false;
                            overStrikeItem.setSelection(false);
@@ -3917,43 +3918,43 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
                if (event.keyCode == SWT.BS) {
                    event.doit = false;
                    int currentPos = textEditor.getCaretOffset();
-                    String textUpToCaret = textEditor.getText().substring(0, currentPos);
-                    int leftMost=textUpToCaret.lastIndexOf("[") + 1;
-                    int rightMost = textEditor.getText().indexOf("]",currentPos);
+                    String textUpToCaret = textEditor.getText().substring(
+                            0, currentPos);
+                    int leftMost = textUpToCaret.lastIndexOf("[") + 1;
+                    int rightMost = textEditor.getText().indexOf("]",
+                            currentPos);
                    int editableTextWidth = rightMost - leftMost;
-                    String leftPart="";
-                    String rightPart="";
+                    String leftPart = "";
+                    String rightPart = "";
                    if (currentPos == leftMost) {
                        leftPart = "";
-                        rightPart = textEditor.getText().substring(
-                                currentPos, rightMost);
-                        textEditor.setCaretOffset(leftMost);
-                    }
-                    else if (currentPos > leftMost && currentPos <= rightMost){
-                        leftPart = textEditor.getText().substring(
-                                leftMost, currentPos - 1);
                        rightPart = textEditor.getText().substring(
                                currentPos, rightMost);
-                    }
-                    else if (currentPos == rightMost) {
-                        leftPart = textEditor.getText().substring(
-                                leftMost, currentPos-1);
+                        textEditor.setCaretOffset(leftMost);
+                    } else if ((currentPos > leftMost)
+                            && (currentPos <= rightMost)) {
+                        leftPart = textEditor.getText().substring(leftMost,
+                                currentPos - 1);
+                        rightPart = textEditor.getText().substring(
+                                currentPos, rightMost);
+                    } else if (currentPos == rightMost) {
+                        leftPart = textEditor.getText().substring(leftMost,
+                                currentPos - 1);
                        rightPart = "";
                    }
                    String newString = leftPart + rightPart;
-                    int neededPadSpaces = editableTextWidth - newString.length();
+                    int neededPadSpaces = editableTextWidth
+                            - newString.length();
                    String newPaddedString = String.format("%1$-"
-                            + (neededPadSpaces+1) + "s", newString);
+                            + (neededPadSpaces + 1) + "s", newString);
                    String spacedoutString = String.format("%1$-"
-                            + (editableTextWidth) + "s",
-                            " ");
+                            + (editableTextWidth) + "s", " ");
                    textEditor.replaceTextRange(leftMost,
                            spacedoutString.length(), spacedoutString);
                    textEditor.replaceTextRange(leftMost,
                            newPaddedString.length(), newPaddedString);
                    textEditor.setCaretOffset(currentPos - 1);


                } else if (event.keyCode == SWT.TAB) {
                    if (!isTemplateOverwriteModeSet) {
                        if (overwriteMode) {
@@ -3968,11 +3969,11 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
                    String textUpToCaret = textEditor.getText().substring(
                            0, currentPos);
                    int openBracketPos = textUpToCaret.lastIndexOf("[");
-                    openBracketPos = textEditor.getText().indexOf("[", currentPos);
+                    openBracketPos = textEditor.getText().indexOf("[",
+                            currentPos);
                    textEditor.setCaretOffset(openBracketPos + 1);
-                }
-                else if (event.keyCode>=97 && event.keyCode <=122 ||
-                        event.keyCode>=48 && event.keyCode <=57){
+                } else if (((event.keyCode >= 97) && (event.keyCode <= 122))
+                        || ((event.keyCode >= 48) && (event.keyCode <= 57))) {
                    event.doit = true;
                }
            }
@@ -4160,7 +4161,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
     * Enter the text editor mode.
     */
    private void enterEditor() {
        initTemplateOverwriteMode();
        StdTextProduct product = TextDisplayModel.getInstance()
                .getStdTextProduct(token);
        if ((product != null)
@@ -4401,7 +4402,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
                .getProductCategory(token)
                + tdm.getProductDesignator(token);
        // Set the header text field.
-        if (bbbid.equals("NOR") || (bbbid.isEmpty() && tdm.getAfosPil(token) != null)) {
+        if (bbbid.equals("NOR")
+                || (bbbid.isEmpty() && (tdm.getAfosPil(token) != null))) {
            String wmoId = tdm.getWmoId(token);
            wmoId = (wmoId.length() > 0 ? wmoId : "-");
            String siteId = tdm.getSiteId(token);
@@ -4902,7 +4904,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        if (warnGenFlag) {
            QCConfirmationMsg qcMsg = new QCConfirmationMsg();
            if (!qcMsg.checkWarningInfo(headerTF.getText().toUpperCase(),
-                    textEditor.getText().toUpperCase(), prod.getNnnid())) {
+                    MixedCaseProductSupport.conditionalToUpper(prod.getNnnid(),
+                            textEditor.getText()), prod.getNnnid())) {
                WarnGenConfirmationDlg wgcd = new WarnGenConfirmationDlg(shell,
                        qcMsg.getTitle(), qcMsg.getProductMessage(),
                        qcMsg.getModeMessage());
@@ -4986,7 +4989,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        StdTextProduct prod = getStdTextProduct();
        EmergencyConfirmationMsg emergencyMsg = new EmergencyConfirmationMsg();
        if (emergencyMsg.checkWarningInfo(headerTF.getText().toUpperCase(),
-                textEditor.getText().toUpperCase(), prod.getNnnid()) == false) {
+                MixedCaseProductSupport.conditionalToUpper(prod.getNnnid(),
+                        textEditor.getText()), prod.getNnnid()) == false) {

            WarnGenConfirmationDlg wgcd = new WarnGenConfirmationDlg(shell,
                    emergencyMsg.getTitle(), emergencyMsg.getProductMessage(),
@@ -5016,8 +5020,9 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
     */
    private void warngenCloseCallback(boolean resend) {

-        // DR14553 (make upper case in product)
-        String body = textEditor.getText().toUpperCase();
+        StdTextProduct prod = getStdTextProduct();
+        String body = MixedCaseProductSupport.conditionalToUpper(
+                prod.getNnnid(), textEditor.getText());
        CAVEMode mode = CAVEMode.getMode();
        boolean isOperational = (CAVEMode.OPERATIONAL.equals(mode) || CAVEMode.TEST
                .equals(mode));
@@ -5031,7 +5036,6 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
            inEditMode = false;
        }
        if (!resend) {
-            StdTextProduct prod = getStdTextProduct();
            OUPTestRequest testReq = new OUPTestRequest();
            testReq.setOupRequest(createOUPRequest(prod,
                    prod.getProduct()));
@@ -5075,8 +5079,6 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,

            String product = TextDisplayModel.getInstance().getProduct(
                    token);
-            // TODO: Should not need to call getProduct and the like twice.
-            StdTextProduct prod = getStdTextProduct();

            OUPRequest req = createOUPRequest(prod, product);

@@ -5093,8 +5095,10 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        } else {
            try {
                if (!resend) {
-                    body = VtecUtil.getVtec(removeSoftReturns(textEditor
-                            .getText()));
+                    body = VtecUtil
+                            .getVtec(removeSoftReturns(MixedCaseProductSupport
+                                    .conditionalToUpper(prod.getNnnid(),
+                                            textEditor.getText())));
                }
                updateTextEditor(body);
                if ((inEditMode || resend)
@@ -5168,7 +5172,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
    private static String copyEtn(String from, String to) {
        VtecObject fromVtec = VtecUtil.parseMessage(from);

-        if (fromVtec != null && "NEW".equals(fromVtec.getAction())) {
+        if ((fromVtec != null) && "NEW".equals(fromVtec.getAction())) {
            VtecObject toVtec = VtecUtil.parseMessage(to);
            if (toVtec != null) {
                toVtec.setSequence(fromVtec.getSequence());
@@ -5204,8 +5208,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
            body.append("\n");
        }
        body.append(textEditor.getText().trim());

-        if (AFOSParser.isTemplate){
+        if (AFOSParser.isTemplate) {
            return removePreformat(body.toString());
        }

@@ -5279,7 +5283,9 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,

        String header = headerTF.getText().toUpperCase();
        String body = resend ? resendMessage()
-                : removeSoftReturns(textEditor.getText().toUpperCase());
+                : removeSoftReturns(MixedCaseProductSupport
+                        .conditionalToUpper(product.getNnnid(),
+                                textEditor.getText()));
        // verify text
        headerTF.setText(header);
        updateTextEditor(body);
@@ -5302,10 +5308,11 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        if (!isAutoSave) {
            if (!resend) {
                // If not a resend, set the DDHHMM field to the current time
-                if (productText.startsWith("- -") && productText.contains("DDHHMM")) {
+                if (productText.startsWith("- -")
+                        && productText.contains("DDHHMM")) {
                    productText = getUnofficeProduct(currentDate);
                } else {
                    productText = replaceDDHHMM(productText, currentDate);
                }
                VtecObject vtecObj = VtecUtil.parseMessage(productText);
                if (warnGenFlag) {
@@ -5341,7 +5348,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
            productText += ATTACHMENT_STR
                    + statusBarLabel.getText().substring(startIndex);
        }

        if (AFOSParser.isTemplate) {
            productText = removePreformat(productText);
        }
@@ -5909,14 +5916,15 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        if (m.find()) {
            SimpleDateFormat headerFormat = new SimpleDateFormat(
                    "hmm a z EEE MMM d yyyy");
-            TimeZone tz = TextWarningConstants.timeZoneShortNameMap
-                    .get(m.group(5));
+            TimeZone tz = TextWarningConstants.timeZoneShortNameMap.get(m
+                    .group(5));
            if (tz != null) {
                headerFormat.setTimeZone(tz);
                product = product.replace(m.group(1), headerFormat.format(now)
                        .toUpperCase());
            } else {
-                statusHandler.warn("Could not sync MND header time because the time zone could not be determined. Will proceed with save/send.");
+                statusHandler
+                        .warn("Could not sync MND header time because the time zone could not be determined. Will proceed with save/send.");
            }
        }
        return product;
@@ -6944,9 +6952,9 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        }
        String textProduct = product.getASCIIProduct();
        if ((product.getNnnid() + product.getXxxid())
-                .startsWith(AFOSParser.DRAFT_PIL) ||
-                (product.getNnnid() + product.getXxxid())
-                .startsWith(AFOSParser.MCP_NNN )) {
+                .startsWith(AFOSParser.DRAFT_PIL)
+                || (product.getNnnid() + product.getXxxid())
+                        .startsWith(AFOSParser.MCP_NNN)) {
            String[] nnnxxx = TextDisplayModel.getNnnXxx(textProduct);
            String operationalPil = nnnxxx[0] + nnnxxx[1];
            String siteNode = SiteAbbreviationUtil.getSiteNode(nnnxxx[1]);
@@ -8576,7 +8584,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
            return bbb;
        }
    }

    private void initTemplateOverwriteMode() {
        if (AFOSParser.isTemplate) {
            editorInsertCmb.setEnabled(false);
@@ -8594,8 +8602,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
                isTemplateOverwriteModeSet = true;
            }

-        }
-        else {
+        } else {
            editorInsertCmb.setEnabled(true);
            overStrikeItem.setEnabled(true);
            editorCutBtn.setEnabled(true);
@@ -8603,26 +8610,26 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
            editorPasteBtn.setEnabled(true);
            editorFillBtn.setEnabled(true);
            editorAttachBtn.setEnabled(true);
-            if (isTemplateOverwriteModeSet && !overwriteMode){
+            if (isTemplateOverwriteModeSet && !overwriteMode) {
                textEditor.invokeAction(ST.TOGGLE_OVERWRITE);
-                isTemplateOverwriteModeSet=false;
+                isTemplateOverwriteModeSet = false;
            }
-            if (!isTemplateOverwriteModeSet && overwriteMode){
+            if (!isTemplateOverwriteModeSet && overwriteMode) {
                textEditor.invokeAction(ST.TOGGLE_OVERWRITE);
            }
        }
    }

    private String removePreformat(String preformattedText) {
        String modifiedText = preformattedText.replaceAll("\\[|\\]", " ");
        modifiedText = removeSoftReturns(modifiedText);
        return modifiedText;
    }

-    private String getUnofficeProduct(String currDate)
-    {
-        StdTextProduct textProd = TextDisplayModel.getInstance().getStdTextProduct(token);
+    private String getUnofficeProduct(String currDate) {
+        StdTextProduct textProd = TextDisplayModel.getInstance()
+                .getStdTextProduct(token);

        String header = headerTF.getText();

        String nnn = textProd.getNnnid();
@@ -8630,21 +8637,23 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
        String nnnXxx = nnn + xxx;
        String site = SiteMap.getInstance().getSite4LetterId(
                textProd.getCccid());
-        String wmoId = textProd.getCccid() + nnnXxx + " "
-                + getAddressee() + "\nTTAA00 " + site;
+        String wmoId = textProd.getCccid() + nnnXxx + " " + getAddressee()
+                + "\nTTAA00 " + site;

        header = header.replaceFirst("\n" + nnnXxx, "");
        header = header.replaceFirst("-", "ZCZC");
        header = header.replaceFirst("-", wmoId);

-        if (currDate != null)
+        if (currDate != null) {
            header = header.replaceFirst("DDHHMM", currDate);
-        else
+        } else {
            header = header.replaceFirst("DDHHMM", textProd.getHdrtime());
-        String body = textEditor.getText().toUpperCase();
+        }

-        header = header + "\n\n"+body +"\n!--not sent--!";
+        String body = MixedCaseProductSupport.conditionalToUpper(nnn,
+                textEditor.getText());
+
+        header = header + "\n\n" + body + "\n!--not sent--!";

        return header;
    }
|
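The last hunk above swaps an unconditional toUpperCase() for MixedCaseProductSupport.conditionalToUpper(nnn, ...). As a rough, self-contained sketch of that idea only (not the AWIPS implementation; the set of mixed-case product categories below is an invented placeholder), a conditional upper-case helper could look like:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Locale;
    import java.util.Set;

    public class ConditionalUpperSketch {

        // Hypothetical set of product categories (NNN ids) allowed to stay mixed case.
        private static final Set<String> MIXED_CASE_NNN =
                new HashSet<>(Arrays.asList("AFD", "PNS", "RER"));

        /** Upper-case the body only when the product category is not mixed-case enabled. */
        public static String conditionalToUpper(String nnn, String body) {
            if (nnn != null && MIXED_CASE_NNN.contains(nnn.toUpperCase(Locale.US))) {
                return body; // mixed case allowed, leave the text alone
            }
            return body.toUpperCase(Locale.US);
        }

        public static void main(String[] args) {
            System.out.println(conditionalToUpper("AFD", "Scattered showers expected."));
            System.out.println(conditionalToUpper("ZZZ", "Scattered showers expected."));
        }
    }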
@@ -47,6 +47,13 @@
             value="textws/gui"
             recursive="true">
         </path>
+        <path
+            application="TextWS"
+            localizationType="COMMON_STATIC"
+            name="Mixed Case"
+            value="mixedCase"
+            recursive="false">
+        </path>
     </extension>

 </plugin>
@@ -122,10 +122,10 @@ public abstract class AbstractAWIPSComponent extends CAVEApplication {
      * getWorkbenchAdvisor()
      */
     @Override
-    protected WorkbenchAdvisor getWorkbenchAdvisor() {
+    protected final WorkbenchAdvisor getWorkbenchAdvisor() {
         WorkbenchAdvisor workbenchAdvisor = null;
         if ((getRuntimeModes() & WORKBENCH) != 0) {
-            workbenchAdvisor = new AWIPSWorkbenchAdvisor();
+            workbenchAdvisor = createAWIPSWorkbenchAdvisor();
         } else if (!isNonUIComponent()) {
             workbenchAdvisor = new HiddenWorkbenchAdvisor(getComponentName(),
                     this);

@@ -141,6 +141,14 @@ public abstract class AbstractAWIPSComponent extends CAVEApplication {
         return workbenchAdvisor;
     }

+    /**
+     * @return A new instance of {@link AWIPSWorkbenchAdvisor} to use for the
+     *         component's {@link WorkbenchAdvisor}
+     */
+    protected AWIPSWorkbenchAdvisor createAWIPSWorkbenchAdvisor() {
+        return new AWIPSWorkbenchAdvisor();
+    }
+
     /*
      * (non-Javadoc)
      *
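The getWorkbenchAdvisor() change above is a factory-method refactor: the template method becomes final and delegates creation to an overridable createAWIPSWorkbenchAdvisor(). A minimal sketch of the pattern with generic stand-in classes (not the real CAVE types):

    public class FactoryMethodSketch {

        static class Advisor {
            String describe() { return "default advisor"; }
        }

        static class Component {
            // Subclasses override the factory method instead of the final accessor.
            protected Advisor createAdvisor() {
                return new Advisor();
            }

            protected final Advisor getAdvisor() {
                return createAdvisor();
            }
        }

        static class TextWorkstationComponent extends Component {
            @Override
            protected Advisor createAdvisor() {
                return new Advisor() {
                    @Override
                    String describe() { return "customized advisor"; }
                };
            }
        }

        public static void main(String[] args) {
            System.out.println(new Component().getAdvisor().describe());
            System.out.println(new TextWorkstationComponent().getAdvisor().describe());
        }
    }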
@@ -71,7 +71,7 @@ CASE ${BACKUP_NAME}
     WHEN 42 then 'DEBRIS FLOW'
     WHEN 43 then 'BLOWING SNOW'
     WHEN 44 then 'RAIN'
-    ELSE ${BACKUP_NAME}
+    ELSE ''
 END;
 UPDATE ${TABLE_NAME} set ${STATION_COLUMN} = concat(${LON_COLUMN}, ':', ${LAT_COLUMN});
 ALTER TABLE ${TABLE_NAME} DROP COLUMN ${BACKUP_NAME};
 deltaScripts/14.4.1/DR3454/createEventsSequences.sh (new executable file, 28 lines)
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# This script creates sequences for the tables in the events schema
+
+STATS_MAX_VAL=$(psql -U awips -d metadata -t -c "select max(id)+1 from events.stats;")
+NOTIFICATION_MAX_VAL=$(psql -U awips -d metadata -t -c "select max(id)+1 from events.notification;")
+AGGREGATE_MAX_VAL=$(psql -U awips -d metadata -t -c "select max(id)+1 from events.aggregate;")
+
+if [ -z $STATS_MAX_VAL ]
+then
+    STATS_MAX_VAL=1
+fi
+
+if [ -z $NOTIFICATION_MAX_VAL ]
+then
+    NOTIFICATION_MAX_VAL=1
+fi
+
+if [ -z $AGGREGATE_MAX_VAL ]
+then
+    AGGREGATE_MAX_VAL=1
+fi
+
+psql -U awips -d metadata -c \
+"CREATE SEQUENCE stats_seq START WITH $STATS_MAX_VAL; \
+CREATE SEQUENCE notification_seq START WITH $NOTIFICATION_MAX_VAL; \
+CREATE SEQUENCE aggregate_seq START WITH $AGGREGATE_MAX_VAL;"
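The delta script seeds each new sequence at max(id)+1 so identifiers already handed out are never reused. A hedged JDBC sketch of the same seeding logic for one sequence (the connection URL and credentials are placeholders, and a PostgreSQL driver is assumed on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class CreateSequenceSketch {

        public static void main(String[] args) throws Exception {
            // Placeholder connection settings; adjust for the target metadata database.
            String url = "jdbc:postgresql://localhost:5432/metadata";
            try (Connection conn = DriverManager.getConnection(url, "awips", "");
                 Statement stmt = conn.createStatement()) {

                long start = 1;
                // max(id)+1 is NULL on an empty table, so fall back to 1.
                try (ResultSet rs = stmt.executeQuery(
                        "select max(id)+1 from events.stats")) {
                    if (rs.next() && rs.getObject(1) != null) {
                        start = rs.getLong(1);
                    }
                }

                stmt.executeUpdate("CREATE SEQUENCE stats_seq START WITH " + start);
            }
        }
    }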
@@ -73,6 +73,8 @@
     <!-- Cache Properties -->
     <property name="hibernate.cache.use_second_level_cache">false</property>
     <property name="hibernate.cache.use_query_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">8</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">16</property>

     </session-factory>
 </hibernate-configuration>

@@ -73,6 +73,8 @@
     <!-- Cache Properties -->
     <property name="hibernate.cache.use_second_level_cache">false</property>
     <property name="hibernate.cache.use_query_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">8</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">16</property>

     </session-factory>
 </hibernate-configuration>

@@ -73,6 +73,8 @@
     <!-- Cache Properties -->
     <property name="hibernate.cache.use_second_level_cache">false</property>
     <property name="hibernate.cache.use_query_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">8</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">16</property>

     </session-factory>
 </hibernate-configuration>

@@ -73,6 +73,8 @@
     <!-- Cache Properties -->
     <property name="hibernate.cache.use_query_cache">false</property>
     <property name="hibernate.cache.use_second_level_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">8</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">16</property>

     </session-factory>
 </hibernate-configuration>

@@ -73,6 +73,8 @@
     <!-- Cache Properties -->
     <property name="hibernate.cache.use_second_level_cache">false</property>
     <property name="hibernate.cache.use_query_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">8</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">16</property>

     </session-factory>
 </hibernate-configuration>

@@ -76,6 +76,8 @@
     <property name="hibernate.cache.use_second_level_cache">false</property>
     <property name="hibernate.jdbc.use_streams_for_binary">false</property>
     <property name="hibernate.cache.use_query_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">16</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">32</property>

     </session-factory>
 </hibernate-configuration>

@@ -70,6 +70,8 @@
     <!-- Cache Properties -->
     <property name="hibernate.cache.use_second_level_cache">false</property>
     <property name="hibernate.cache.use_query_cache">false</property>
+    <property name="hibernate.query.plan_cache_max_strong_references">8</property>
+    <property name="hibernate.query.plan_cache_max_soft_references">16</property>

     </session-factory>
 </hibernate-configuration>
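Each session factory above gains two query-plan cache limits. If the same settings were supplied to Hibernate programmatically instead of in the XML, they would simply be additional key/value properties; the sketch below only assembles that property set and does not start Hibernate:

    import java.util.Properties;

    public class QueryPlanCachePropsSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("hibernate.cache.use_second_level_cache", "false");
            props.setProperty("hibernate.cache.use_query_cache", "false");
            // New limits added by this change: how many compiled query plans are held
            // strongly versus kept behind soft references.
            props.setProperty("hibernate.query.plan_cache_max_strong_references", "8");
            props.setProperty("hibernate.query.plan_cache_max_soft_references", "16");
            props.list(System.out);
        }
    }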
@@ -23,6 +23,8 @@
 # Date         Ticket#    Engineer    Description
 # ------------ ---------- ----------- --------------------------
 # 03/25/2014   #2664      randerso    Added support for importing non-WGS84 shape files
+# 10/23/2014   #3685      randerso    Fixed bug where .prj was not recognized when shape file
+#                                     was in the current directory (no directory specified)
 #
 ##

@@ -46,7 +48,7 @@ if [ $# -lt 3 ] ; then
     exit -1
 fi

-SHP_PATH=${1}
+SHP_PATH=`readlink -f ${1}`
 SHP_DIR="${SHP_PATH%/*}"    # shape file dir
 SHP_NAME="${SHP_PATH##*/}"  # shape file name with extension
 SHP_BASE="${SHP_NAME%.*}"   # shape file name without extension
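The script now runs the shape file argument through readlink -f before splitting it, so a bare file name given in the current directory still produces a usable directory part. A Java sketch of roughly the same resolve-then-split step (symlink resolution aside):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class ShapePathSketch {
        public static void main(String[] args) {
            String arg = args.length > 0 ? args[0] : "uscounties.shp"; // may be relative
            // Roughly what `readlink -f` does here: make the path absolute and collapse ./.. parts.
            Path shpPath = Paths.get(arg).toAbsolutePath().normalize();

            Path shpDir = shpPath.getParent();                 // ${SHP_PATH%/*}
            String shpName = shpPath.getFileName().toString(); // ${SHP_PATH##*/}
            int dot = shpName.lastIndexOf('.');
            String shpBase = dot >= 0 ? shpName.substring(0, dot) : shpName; // ${SHP_NAME%.*}

            System.out.println("dir=" + shpDir + " name=" + shpName + " base=" + shpBase);
        }
    }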
@@ -106,6 +106,7 @@ import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
  * Sep 30, 2013  #2361     njensen     Use JAXBManager for XML
  * Jan 21, 2014  #2720     randerso    Improve efficiency of merging polygons in edit area generation
  * Aug 27, 2014  #3563     randerso    Fix issue where edit areas are regenerated unnecessarily
+ * Oct 20, 2014  #3685     randerso    Changed structure of editAreaAttrs to keep zones from different maps separated
  *
  * </pre>
  *

@@ -131,7 +132,7 @@ public class MapManager {

     private final Map<String, List<String>> editAreaMap = new HashMap<String, List<String>>();

-    private final Map<String, Map<String, Object>> editAreaAttrs = new HashMap<String, Map<String, Object>>();
+    private final Map<String, List<Map<String, Object>>> editAreaAttrs = new HashMap<String, List<Map<String, Object>>>();

     private final List<String> iscMarkersID = new ArrayList<String>();

@@ -811,6 +812,8 @@ public class MapManager {
     private List<ReferenceData> createReferenceData(DbShapeSource mapDef) {
         // ServerResponse sr;
         List<ReferenceData> data = new ArrayList<ReferenceData>();
+        List<Map<String, Object>> attributes = new ArrayList<Map<String, Object>>();
+        editAreaAttrs.put(mapDef.getDisplayName(), attributes);

         // Module dean("DefaultEditAreaNaming");
         ArrayList<String> created = new ArrayList<String>();

@@ -871,7 +874,8 @@ public class MapManager {
             // handle new case
             else {
                 created.add(ean);
-                editAreaAttrs.put(ean, info);
+                info.put("editarea", ean);
+                attributes.add(info);
             }

             tempData.put(ean, mp);
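The MapManager change turns editAreaAttrs into a map of attribute lists keyed by source map, so edit areas from different maps no longer collide under one key. A small illustration of that grouping with plain collections (the sample attribute values are invented):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class EditAreaAttrsSketch {
        public static void main(String[] args) {
            // One attribute list per source map, keyed by the map's display name.
            Map<String, List<Map<String, Object>>> editAreaAttrs = new HashMap<>();

            List<Map<String, Object>> countyAttrs = new ArrayList<>();
            editAreaAttrs.put("Counties", countyAttrs);

            Map<String, Object> info = new HashMap<>();
            info.put("editarea", "NEC055"); // the edit area name travels with its attributes
            info.put("fe_area", "ne");
            info.put("cwa", "OAX");
            countyAttrs.add(info);

            // Zones from a different map land in their own list instead of overwriting counties.
            editAreaAttrs.put("Zones", new ArrayList<>());

            System.out.println(editAreaAttrs);
        }
    }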
@@ -20,27 +20,36 @@
 package com.raytheon.edex.plugin.gfe.textproducts;

 import java.io.File;
+import java.io.PrintWriter;
+import java.text.DecimalFormat;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;

 import jep.JepException;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 import com.raytheon.edex.plugin.gfe.reference.MapManager;
 import com.raytheon.uf.common.dataplugin.gfe.python.GfePyIncludeUtil;
+import com.raytheon.uf.common.dataquery.db.QueryResult;
+import com.raytheon.uf.common.dataquery.db.QueryResultRow;
 import com.raytheon.uf.common.localization.IPathManager;
 import com.raytheon.uf.common.localization.LocalizationContext;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
+import com.raytheon.uf.common.localization.LocalizationFile;
 import com.raytheon.uf.common.localization.PathManagerFactory;
+import com.raytheon.uf.common.localization.exception.LocalizationException;
 import com.raytheon.uf.common.python.PyUtil;
 import com.raytheon.uf.common.python.PythonScript;
+import com.raytheon.uf.common.status.IUFStatusHandler;
+import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.util.FileUtil;
+import com.raytheon.uf.edex.database.tasks.SqlQueryTask;

 /**
- * TODO Add Description
+ * Code to generate the AreaDictionary for text formatters
  *
  * <pre>
  *

@@ -49,6 +58,8 @@ import com.raytheon.uf.common.util.FileUtil;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * May 4, 2011             wldougher   Moved from MapManager
+ * Oct 10, 2014  #3685     randerso    Add code to generate the fips2cities and zones2cites
+ *                                     python modules from the GIS database tables
  *
  * </pre>
  *

@@ -57,11 +68,61 @@ import com.raytheon.uf.common.util.FileUtil;
  */

 public class AreaDictionaryMaker {
-    private static final Log theLogger = LogFactory
-            .getLog(AreaDictionaryMaker.class);
+    protected static final transient IUFStatusHandler statusHandler = UFStatus
+            .getHandler(AreaDictionaryMaker.class);
+
+    protected static final String FIPS_CITY_QUERY = //
+    "SELECT name, population, ST_Y(city.the_geom), ST_X(city.the_geom) "
+            + "FROM mapdata.city, mapdata.county "
+            + "WHERE county.state = '%1$s' AND substring(fips,3,3) = '%2$s' "
+            + "AND ST_Contains(county.the_geom, city.the_geom) "
+            + "ORDER BY city.name;";
+
+    protected static final String ZONES_CITY_QUERY = //
+    "SELECT city.name, population, ST_Y(city.the_geom), ST_X(city.the_geom) "
+            + "FROM mapdata.city, mapdata.zone "
+            + "WHERE zone.state = '%1$s' AND zone.zone = '%2$s' "
+            + "AND ST_Contains(zone.the_geom, city.the_geom) "
+            + "ORDER BY city.name;";
+
+    protected static final Map<String, String> PART_OF_STATE;
+    static {
+        PART_OF_STATE = new HashMap<>(30, 1.0f);
+        PART_OF_STATE.put(null, "");
+        PART_OF_STATE.put("bb", "big bend");
+        PART_OF_STATE.put("c", "");
+        PART_OF_STATE.put("cc", "central");
+        PART_OF_STATE.put("E", "");
+        PART_OF_STATE.put("ea", "east");
+        PART_OF_STATE.put("ec", "east central");
+        PART_OF_STATE.put("ee", "eastern");
+        PART_OF_STATE.put("er", "east central upper");
+        PART_OF_STATE.put("eu", "eastern upper");
+        PART_OF_STATE.put("M", "");
+        PART_OF_STATE.put("mi", "middle");
+        PART_OF_STATE.put("nc", "north central");
+        PART_OF_STATE.put("ne", "northeast");
+        PART_OF_STATE.put("nn", "northern");
+        PART_OF_STATE.put("nr", "north central upper");
+        PART_OF_STATE.put("nw", "northwest");
+        PART_OF_STATE.put("pa", "panhandle");
+        PART_OF_STATE.put("pd", "piedmont");
+        PART_OF_STATE.put("sc", "south central");
+        PART_OF_STATE.put("se", "southeast");
+        PART_OF_STATE.put("so", "south");
+        PART_OF_STATE.put("sr", "south central upper");
+        PART_OF_STATE.put("ss", "southern");
+        PART_OF_STATE.put("sw", "southwest");
+        PART_OF_STATE.put("up", "upstate");
+        PART_OF_STATE.put("wc", "west central");
+        PART_OF_STATE.put("wu", "western upper");
+        PART_OF_STATE.put("ww", "western");
+    }

     protected IPathManager pathMgr = PathManagerFactory.getPathManager();

+    private Map<String, String> stateDict;
+
     /**
      * Generate the AreaDictionary.py and CityLocation.py scripts for site,
      * using editAreaAttrs.

@@ -73,14 +134,14 @@ public class AreaDictionaryMaker {
      *            A Map from edit area names to shape file attributes
      */
     public void genAreaDictionary(String site,
-            Map<String, Map<String, Object>> editAreaAttrs) {
-        theLogger.info("Area Dictionary generation phase");
+            Map<String, List<Map<String, Object>>> editAreaAttrs) {
+        statusHandler.info("Area Dictionary generation phase");

         if (site == null) {
             throw new IllegalArgumentException("site is null");
         }

-        if ("".equals(site)) {
+        if (site.isEmpty()) {
             throw new IllegalArgumentException("site is an empty string");
         }

@@ -89,14 +150,99 @@ public class AreaDictionaryMaker {
         }

         long t0 = System.currentTimeMillis();
+        genStateDict();

-        LocalizationContext cx = pathMgr.getContext(
+        LocalizationContext context = pathMgr.getContext(
+                LocalizationContext.LocalizationType.EDEX_STATIC,
+                LocalizationContext.LocalizationLevel.CONFIGURED);
+        context.setContextName(site);
+
+        List<Map<String, Object>> countyAttrs = editAreaAttrs.get("Counties");
+        List<Map<String, Object>> zoneAttrs = editAreaAttrs.get("Zones");
+
+        // To generate national fips2cities and zones2cities files
+        // uncomment the following lines. This should be done for testing
+        // purposes only and should not be checked in uncommented.
+
+        // context = pathMgr.getContext(
+        // LocalizationContext.LocalizationType.EDEX_STATIC,
+        // LocalizationContext.LocalizationLevel.BASE);
+        //
+        // String fipsQuery =
+        // "SELECT fips, state, fe_area, cwa FROM mapdata.county ORDER BY state, fips;";
+        //
+        // SqlQueryTask task = new SqlQueryTask(fipsQuery, "maps");
+        // try {
+        // QueryResult results = task.execute();
+        // countyAttrs = new ArrayList<Map<String, Object>>(
+        // results.getResultCount());
+        // for (QueryResultRow row : results.getRows()) {
+        // String num = (String) row.getColumn(0);
+        // if (num == null) {
+        // continue;
+        // }
+        //
+        // Map<String, Object> map = new HashMap<>(3, 1.0f);
+        // countyAttrs.add(map);
+        //
+        // String st = (String) row.getColumn(1);
+        // int len = num.length();
+        //
+        // map.put("editarea", st + 'C' + num.substring(len - 3, len));
+        // map.put("fe_area", row.getColumn(2));
+        // map.put("cwa", row.getColumn(3));
+        // }
+        // } catch (Exception e) {
+        // statusHandler.error(e.getLocalizedMessage(), e);
+        // }
+        //
+        // String zonesQuery =
+        // "SELECT zone, state, fe_area, cwa FROM mapdata.zone ORDER BY state, zone;";
+        // task = new SqlQueryTask(zonesQuery, "maps");
+        // try {
+        // QueryResult results = task.execute();
+        // zoneAttrs = new ArrayList<Map<String, Object>>(
+        // results.getResultCount());
+        // for (QueryResultRow row : results.getRows()) {
+        // String num = (String) row.getColumn(0);
+        // if (num == null) {
+        // continue;
+        // }
+        //
+        // Map<String, Object> map = new HashMap<>(3, 1.0f);
+        // zoneAttrs.add(map);
+        //
+        // String st = (String) row.getColumn(1);
+        // int len = num.length();
+        //
+        // map.put("editarea", st + 'Z' + num.substring(len - 3, len));
+        // map.put("fe_area", row.getColumn(2));
+        // map.put("cwa", row.getColumn(3));
+        // }
+        // } catch (Exception e) {
+        // statusHandler.error(e.getLocalizedMessage(), e);
+        // }

+        // To generate national fips2cities and zones2cities files
+        // uncomment the previous lines
+
+        genFips2Cities(context, countyAttrs);
+        genZones2Cities(context, zoneAttrs);
+
+        LocalizationContext baseCtx = pathMgr.getContext(
                 LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);
-        File scriptFile = pathMgr.getLocalizationFile(cx,
+        File scriptFile = pathMgr.getLocalizationFile(baseCtx,
                 FileUtil.join("gfe", "createAreaDictionary.py")).getFile();
+
+        LocalizationContext configCtx = pathMgr.getContext(
+                LocalizationType.EDEX_STATIC, LocalizationLevel.CONFIGURED);
+        configCtx.setContextName(site);
+        File configDir = pathMgr.getLocalizationFile(configCtx, "gfe")
+                .getFile();
+
         String includePath = PyUtil.buildJepIncludePath(true,
                 GfePyIncludeUtil.getCommonPythonIncludePath(),
-                scriptFile.getParent());
+                configDir.getPath(), scriptFile.getParent());
         Map<String, Object> argMap = new HashMap<String, Object>();

         LocalizationContext caveStaticConfig = pathMgr.getContext(

@@ -120,7 +266,7 @@ public class AreaDictionaryMaker {
             // createAreaDictionary()
             pyScript.execute("createCityLocation", argMap);
         } catch (JepException e) {
-            theLogger.error("Error generating area dictionary", e);
+            statusHandler.error("Error generating area dictionary", e);
         } finally {
             if (pyScript != null) {
                 pyScript.dispose();

@@ -128,6 +274,138 @@ public class AreaDictionaryMaker {
         }

         long t1 = System.currentTimeMillis();
-        theLogger.info("Area Dictionary generation time: " + (t1 - t0) + " ms");
+        statusHandler.info("Area Dictionary generation time: " + (t1 - t0)
+                + " ms");
+    }
+
+    private void genFips2Cities(LocalizationContext context,
+            List<Map<String, Object>> attributes) {
+        genArea2Cities(context, attributes, "fips2cities.py", "fipsdata",
+                "FIPS", 'C', FIPS_CITY_QUERY);
+    }
+
+    private void genZones2Cities(LocalizationContext context,
+            List<Map<String, Object>> attributes) {
+        genArea2Cities(context, attributes, "zones2cities.py", "zonedata",
+                "Zones", 'Z', ZONES_CITY_QUERY);
+    }
+
+    private void genArea2Cities(LocalizationContext context,
+            List<Map<String, Object>> attributes, String fileName,
+            String dictName, String group, char separator, String cityQuery) {
+
+        LocalizationFile lf = pathMgr.getLocalizationFile(context,
+                FileUtil.join("gfe", fileName));
+
+        try (PrintWriter out = new PrintWriter(lf.openOutputStream())) {
+            out.println(dictName + " = {");
+
+            try {
+                DecimalFormat df = new DecimalFormat("0.00000");
+                StringBuilder sb = new StringBuilder();
+                Pattern pattern = Pattern.compile("(\\p{Upper}{2})" + separator
+                        + "(\\d{3})");
+
+                for (Map<String, Object> att : attributes) {
+                    String ean = (String) att.get("editarea");
+                    if ((ean == null) || ean.isEmpty()) {
+                        continue;
+                    }
+
+                    Matcher matcher = pattern.matcher(ean);
+                    if (!matcher.matches()) {
+                        continue;
+                    }
+
+                    String state = matcher.group(1);
+                    String num = matcher.group(2);
+
+                    String fullStateName = this.stateDict.get(state);
+                    String partOfState = PART_OF_STATE.get(att.get("fe_area"));
+                    String wfo = (String) att.get("cwa");
+
+                    SqlQueryTask task = new SqlQueryTask(String.format(
+                            cityQuery, state, num), "maps");
+
+                    // retrieve cities for this area
+                    QueryResult citiesResult = null;
+                    try {
+                        citiesResult = task.execute();
+                    } catch (Exception e) {
+                        statusHandler
+                                .error("Error getting cites for " + ean, e);
+                    }
+
+                    sb.setLength(0);
+                    sb.append("'").append(ean).append("': {");
+                    sb.append("'fullStateName': '").append(fullStateName)
+                            .append("', ");
+                    sb.append("'state': '").append(state).append("', ");
+
+                    sb.append("'cities': [");
+                    if ((citiesResult != null)
+                            && (citiesResult.getResultCount() > 0)) {
+                        for (QueryResultRow city : citiesResult.getRows()) {
+                            String name = (String) city.getColumn(0);
+                            Object population = city.getColumn(1);
+                            Double lat = (Double) city.getColumn(2);
+                            Double lon = (Double) city.getColumn(3);
+
+                            if (name.indexOf("'") >= 0) {
+                                sb.append("(\"").append(name).append("\", ");
+                            } else {
+                                sb.append("('").append(name).append("', ");
+                            }
+                            if (population == null) {
+                                sb.append("None, ");
+                            } else {
+                                sb.append(population.toString()).append(", ");
+                            }
+                            sb.append("'").append(df.format(lat)).append("', ");
+                            sb.append("'").append(df.format(lon))
+                                    .append("'), ");
+                        }
+                        sb.setLength(sb.length() - 2);
+                    }
+                    sb.append("], ");
+
+                    sb.append("'partOfState': '").append(partOfState)
+                            .append("', ");
+                    sb.append("'wfo': '").append(wfo).append("'}, ");
+                    out.println(sb.toString());
+                }
+            } catch (Exception e) {
+                statusHandler.error(e.getLocalizedMessage(), e);
+            }
+
+            out.println("}");
+        } catch (LocalizationException e) {
+            statusHandler.error(e.getLocalizedMessage(), e);
+        }
+
+        try {
+            lf.save();
+        } catch (Exception e) {
+            statusHandler.error(e.getLocalizedMessage(), e);
+        }
+
+    }
+
+    private void genStateDict() {
+        SqlQueryTask task = new SqlQueryTask(
+                "SELECT state, name FROM mapdata.states", "maps");
+        try {
+            QueryResult result = task.execute();
+            stateDict = new HashMap<String, String>(result.getResultCount(),
+                    1.0f);
+            for (QueryResultRow row : result.getRows()) {
+                String st = (String) row.getColumn(0);
+                String name = (String) row.getColumn(1);
+                stateDict.put(st, name);
+            }
+
+        } catch (Exception e) {
+            statusHandler.error(e.getLocalizedMessage(), e);
+        }
     }
 }
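genArea2Cities above fills the positional %1$s/%2$s parameters of the city query with String.format and writes coordinates through DecimalFormat("0.00000"). A standalone sketch of just that formatting step, using example state, FIPS, and coordinate values:

    import java.text.DecimalFormat;

    public class CityQueryFormatSketch {

        static final String FIPS_CITY_QUERY =
                "SELECT name, population, ST_Y(city.the_geom), ST_X(city.the_geom) "
                        + "FROM mapdata.city, mapdata.county "
                        + "WHERE county.state = '%1$s' AND substring(fips,3,3) = '%2$s' "
                        + "AND ST_Contains(county.the_geom, city.the_geom) "
                        + "ORDER BY city.name;";

        public static void main(String[] args) {
            // %1$s / %2$s are positional, so one argument list serves both query templates.
            String sql = String.format(FIPS_CITY_QUERY, "NE", "055");
            System.out.println(sql);

            DecimalFormat df = new DecimalFormat("0.00000");
            double lat = 41.2565, lon = -95.9345; // example coordinates
            System.out.println("('Omaha', 408958, '" + df.format(lat) + "', '"
                    + df.format(lon) + "'), ");
        }
    }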
@@ -21,7 +21,7 @@ package com.raytheon.edex.plugin.gfe.textproducts;

 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
+import java.io.PrintWriter;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -32,10 +32,14 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

 import com.raytheon.edex.utility.ProtectedFiles;
+import com.raytheon.uf.common.dataplugin.gfe.python.GfePyIncludeUtil;
+import com.raytheon.uf.common.dataquery.db.QueryResult;
+import com.raytheon.uf.common.dataquery.db.QueryResultRow;
 import com.raytheon.uf.common.localization.IPathManager;
 import com.raytheon.uf.common.localization.LocalizationContext;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
+import com.raytheon.uf.common.localization.LocalizationFile;
 import com.raytheon.uf.common.localization.PathManagerFactory;
 import com.raytheon.uf.common.python.PyUtil;
 import com.raytheon.uf.common.python.PythonScript;

@@ -45,6 +49,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
 import com.raytheon.uf.edex.database.cluster.ClusterTask;
+import com.raytheon.uf.edex.database.tasks.SqlQueryTask;

 /**
  * Generate and configure text products when needed.

@@ -59,11 +64,13 @@ import com.raytheon.uf.edex.database.cluster.ClusterTask;
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 7, 2008  1222       jelkins     Initial creation
-* Jul 24,2012  #944       dgilling    Fix text product template generation
+* Jul 24, 2012 #944       dgilling    Fix text product template generation
 *                                     to create textProducts and textUtilities.
-* Sep 07,2012  #1150      dgilling    Fix isConfigured to check for textProducts
+* Sep 07, 2012 #1150      dgilling    Fix isConfigured to check for textProducts
 *                                     and textUtilities dirs.
+* Oct 20, 2014 #3685      randerso    Added code to generate SiteCFG.py from GIS database
+*                                     Cleaned up how protected file updates are returned
 *
 * </pre>
 *

@@ -75,6 +82,8 @@ public class Configurator {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(Configurator.class);

+    private static final String CWA_QUERY = "select wfo, region, fullstaid, citystate, city, state from mapdata.cwa order by wfo;";
+
     private static final String CONFIG_TEXT_PRODUCTS_TASK = "GfeConfigureTextProducts";

     private String siteID;

@@ -183,26 +192,79 @@ public class Configurator {
      */
     @SuppressWarnings("unchecked")
     public void execute() {
-        PythonScript python = null;
-        List<String> preEvals = new ArrayList<String>();
+        if (isConfigured()) {
+            statusHandler.info("All text products are up to date");
+            return;
+        }
         IPathManager pathMgr = PathManagerFactory.getPathManager();
+        LocalizationContext context = pathMgr.getContext(
+                LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED);
+        context.setContextName(siteID);
+
+        // regenerate siteCFG.py
+        LocalizationFile lf = null;
+        try {
+            lf = pathMgr.getLocalizationFile(context,
+                    FileUtil.join("python", "gfe", "SiteCFG.py"));
+
+            SqlQueryTask task = new SqlQueryTask(CWA_QUERY, "maps");
+            QueryResult results = task.execute();
+            try (PrintWriter out = new PrintWriter(lf.openOutputStream())) {
+                out.println("##");
+                out.println("# Contains information about products, regions, etc. for each site");
+                out.println("# in the country.");
+                out.println("# region= two-letter regional identifier, mainly used for installation of");
+                out.println("# text product templates");
+                out.println("SiteInfo= {");
+                for (QueryResultRow row : results.getRows()) {
+                    String wfo = (String) row.getColumn(0);
+                    String region = (String) row.getColumn(1);
+                    String fullStationID = (String) row.getColumn(2);
+                    String wfoCityState = (String) row.getColumn(3);
+                    String wfoCity = (String) row.getColumn(4);
+                    String state = (String) row.getColumn(5);
+
+                    out.println(formatEntry(wfo, region, fullStationID,
+                            wfoCityState, wfoCity, state));
+
+                    // Add in AFC's dual domain sites
+                    if (wfo.equals("AFC")) {
+                        out.println(formatEntry("AER", region, fullStationID,
+                                wfoCityState, wfoCity, state));
+                        out.println(formatEntry("ALU", region, fullStationID,
+                                wfoCityState, wfoCity, state));
+                    }
+                }
+
+                // Add in the national centers since they
+                // aren't in the shape file
+                out.println(formatEntry("NH1", "NC", "KNHC",
+                        "National Hurricane Center Miami FL", "Miami", ""));
+                out.println(formatEntry("NH2", "NC", "KNHC",
+                        "National Hurricane Center Miami FL", "Miami", ""));
+                out.println(formatEntry("ONA", "NC", "KWBC",
+                        "Ocean Prediction Center Washington DC",
+                        "Washington DC", ""));
+                out.println(formatEntry("ONP", "NC", "KWBC",
+                        "Ocean Prediction Center Washington DC",
+                        "Washington DC", ""));
+
+                out.println("}");
+            } // out is closed here
+
+            lf.save();
+        } catch (Exception e) {
+            statusHandler.error(e.getLocalizedMessage(), e);
+        }
+
+        PythonScript python = null;
+
         LocalizationContext edexCx = pathMgr.getContext(
                 LocalizationType.EDEX_STATIC, LocalizationLevel.BASE);
-        LocalizationContext commonCx = pathMgr.getContext(
-                LocalizationType.COMMON_STATIC, LocalizationLevel.BASE);

         String filePath = pathMgr.getFile(edexCx,
                 "textproducts" + File.separator + "Generator.py").getPath();
-        String textProductPath = pathMgr.getFile(edexCx,
-                "textProducts.Generator").getPath();
-        String jutilPath = pathMgr.getFile(commonCx, "python").getPath();
+        String commonPython = GfePyIncludeUtil.getCommonPythonIncludePath();

-        // Add some getters we need "in the script" that we want hidden
-        preEvals.add("from JUtil import pylistToJavaStringList");
-        preEvals.add("from textproducts.Generator import Generator");
-        preEvals.add("generator = Generator()");
-        preEvals.add("def getProtectedData():\n return pylistToJavaStringList(generator.getProtectedFiles())");

         Map<String, Object> argList = new HashMap<String, Object>();
         argList.put("siteId", siteID);

@@ -210,20 +272,18 @@ public class Configurator {

         try {
             python = new PythonScript(filePath, PyUtil.buildJepIncludePath(
-                    pythonDirectory, textProductPath, jutilPath), this
-                    .getClass().getClassLoader(), preEvals);
+                    pythonDirectory, commonPython), this.getClass()
+                    .getClassLoader());

             // Open the Python interpreter using the designated script.
-            python.execute("generator.create", argList);
-            protectedFilesList = (List<String>) python.execute(
-                    "getProtectedData", null);
+            protectedFilesList = (List<String>) python.execute("runFromJava",
+                    argList);

             updateProtectedFile();
             updateLastRuntime();
         } catch (JepException e) {
             statusHandler.handle(Priority.PROBLEM,
                     "Error Configuring Text Products", e);
-            e.printStackTrace();
         } finally {
             if (python != null) {
                 python.dispose();

@@ -231,6 +291,22 @@ public class Configurator {
         }
     }

+    private String formatEntry(String wfo, String region, String fullStationID,
+            String wfoCityState, String wfoCity, String state) {
+        StringBuilder sb = new StringBuilder();
+        sb.append(" '").append(wfo).append("': {\n");
+        sb.append(" 'region': '").append(region).append("',\n");
+        sb.append(" 'fullStationID': '").append(fullStationID)
+                .append("',\n");
+        sb.append(" 'wfoCityState': '").append(wfoCityState)
+                .append("',\n");
+        sb.append(" 'wfoCity': '").append(wfoCity).append("',\n");
+        sb.append(" 'state': '").append(state).append("',\n");
+        sb.append(" },");
+
+        return sb.toString();
+    }
+
     /**
      * Update the protected files.
      */
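formatEntry writes one SiteInfo dictionary entry per CWA row into the generated SiteCFG.py. The sketch below reproduces the same StringBuilder construction to show what a single entry looks like; the row values are examples and the exact leading whitespace of the real output may differ:

    public class SiteInfoEntrySketch {

        static String formatEntry(String wfo, String region, String fullStationID,
                String wfoCityState, String wfoCity, String state) {
            StringBuilder sb = new StringBuilder();
            sb.append("    '").append(wfo).append("': {\n");
            sb.append("        'region': '").append(region).append("',\n");
            sb.append("        'fullStationID': '").append(fullStationID).append("',\n");
            sb.append("        'wfoCityState': '").append(wfoCityState).append("',\n");
            sb.append("        'wfoCity': '").append(wfoCity).append("',\n");
            sb.append("        'state': '").append(state).append("',\n");
            sb.append("        },");
            return sb.toString();
        }

        public static void main(String[] args) {
            // Example row; real values come from the mapdata.cwa query.
            System.out.println(formatEntry("OAX", "CR", "KOAX",
                    "Omaha/Valley NE", "Omaha", "Nebraska"));
        }
    }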
@@ -35,8 +35,8 @@ from zones2cities import *
 # ------------ ---------- ----------- --------------------------
 # 01/08/10        #1209     randerso    Initial Creation.
 # 10/19/12        #1091     dgilling    Support localMaps.py.
-#
-#
+# 10/20/2014      #3685     randerso    Converted text to mixed case
+#                                       Fixed mapDict to keep zones from different maps separate
 #

 CityLocationDict = {}

@@ -111,13 +111,23 @@ def makeCityString(dictRecord):
 # handle marine states
 def checkMarineState(ugcCode):
     #returns None if unknown, description if known
-    areas = {'AM': 'ATLANTIC COASTAL WATERS', 'GM': 'GULF OF MEXICO',
-      'LE': 'LAKE ERIE', 'LO': 'LAKE ONTARIO', 'LH': 'LAKE HURON',
-      'SC': 'LAKE ST CLAIR', 'LM': 'LAKE MICHIGAN', 'LS': 'LAKE SUPERIOR',
-      'PZ': 'PACIFIC COASTAL WATERS', 'PK': 'ALASKAN COASTAL WATERS',
-      'PH': 'HAWAIIAN COASTAL WATERS', 'PM': 'MARIANAS WATERS',
-      'AN': 'ATLANTIC COASTAL WATERS', 'PS': 'AMERICAN SAMOA COASTAL WATERS',
-      'SL': 'ST LAWRENCE RIVER'}
+    areas = {
+             'AM': 'Atlantic coastal waters',
+             'GM': 'Gulf of Mexico',
+             'LE': 'Lake Erie',
+             'LO': 'Lake Ontario',
+             'LH': 'Lake Huron',
+             'SC': 'Lake St Clair',
+             'LM': 'Lake Michigan',
+             'LS': 'Lake Superior',
+             'PZ': 'Pacific coastal waters',
+             'PK': 'Alaskan coastal waters',
+             'PH': 'Hawaiian coastal waters',
+             'PM': 'Marianas waters',
+             'AN': 'Atlantic coastal waters',
+             'PS': 'American Samoa coastal waters',
+             'SL': 'St Lawrence River',
+             }
     area = ugcCode[0:2]
     return areas.get(area, None)
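checkMarineState maps the two-letter UGC prefix to a water-body description, now in mixed case. The same table lookup in Java is just a HashMap keyed on the prefix; a sketch with a subset of the entries above (the sample UGC codes are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class MarineStateSketch {

        private static final Map<String, String> AREAS = new HashMap<>();
        static {
            AREAS.put("AM", "Atlantic coastal waters");
            AREAS.put("GM", "Gulf of Mexico");
            AREAS.put("LE", "Lake Erie");
            AREAS.put("PZ", "Pacific coastal waters");
            AREAS.put("SL", "St Lawrence River");
            // ... remaining entries follow the same pattern
        }

        /** Returns the description for a marine UGC code, or null if the prefix is unknown. */
        static String checkMarineState(String ugcCode) {
            return AREAS.get(ugcCode.substring(0, 2));
        }

        public static void main(String[] args) {
            System.out.println(checkMarineState("GMZ555")); // Gulf of Mexico
            System.out.println(checkMarineState("NEZ055")); // null, not a marine code
        }
    }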
@@ -128,82 +138,86 @@ def createAreaDictionary(outputDir, mapDict):
     areadict = {}
     mapIter = mapDict.entrySet().iterator()
     while mapIter.hasNext():
-        entry = mapIter.next()
-        ean = str(entry.getKey())
-        att = entry.getValue()
-        if len(ean):
-            try:
-                d = {}
-                if att.containsKey('zone') and att.containsKey('state'):
-                    d['ugcCode'] = str(att.get('state')) + "Z" + str(att.get('zone'))
-                elif att.containsKey('id'):
-                    d['ugcCode'] = str(att.get('id'))
-                elif att.containsKey('fips') and att.containsKey('state') and \
-                  att.containsKey('countyname'):
-                    d['ugcCode'] = str(att.get('state')) + "C" + str(att.get('fips'))[-3:]
-                    d['ugcName'] = string.strip(str(att.get('countyname')))
-                else:
-                    continue
-
-                if att.containsKey('state'):
-                    d["stateAbbr"] = str(att.get('state'))
-
-                if att.containsKey('name'):
-                    d["ugcName"] = string.strip(str(att.get('name')))
-
-                if att.containsKey('time_zone'):
-                    tzvalue = getRealTimeZone(str(att.get('time_zone')))
-                    if tzvalue is not None:
-                        d["ugcTimeZone"] = tzvalue
-
-                if zonedata.has_key(d['ugcCode']):
-                    cityDict = zonedata[d['ugcCode']]
-                elif fipsdata.has_key(d['ugcCode']):
-                    cityDict = fipsdata[d['ugcCode']]
-                else:
-                    cityDict = None
-
-                if cityDict:
-                    cityString = makeCityString(cityDict)
-                    if cityString is not None:
-                        cityString, locs = cityString
-                        if len(cityString):
-                            d["ugcCityString"] = cityString
-                            CityLocationDict[ean] = locs
-
-                # partOfState codes
-                if zonedata.has_key(d['ugcCode']):
-                    if zonedata[d['ugcCode']].has_key('partOfState'):
-                        d["partOfState"] = \
-                          zonedata[d['ugcCode']]['partOfState']
-                elif fipsdata.has_key(d['ugcCode']):
-                    if fipsdata[d['ugcCode']].has_key('partOfState'):
-                        d["partOfState"] = \
-                          fipsdata[d['ugcCode']]['partOfState']
-
-                # full state name
-                if zonedata.has_key(d['ugcCode']):
-                    if zonedata[d['ugcCode']].has_key('fullStateName'):
-                        d["fullStateName"] = \
-                          zonedata[d['ugcCode']]['fullStateName']
-                elif fipsdata.has_key(d['ugcCode']):
-                    if fipsdata[d['ugcCode']].has_key('fullStateName'):
-                        d["fullStateName"] = \
-                          fipsdata[d['ugcCode']]['fullStateName']
-                else:
-                    marineState = checkMarineState(d['ugcCode'])
-                    if marineState is not None:
-                        d['fullStateName'] = marineState
-
-                if areadict.has_key(ean) and d != areadict[ean]:
-                    LogStream.logDiag("Mismatch of definitions in " +\
-                      "AreaDictionary creation. EditAreaName=", ean,
-                      "AreaDict=\n", areadict[ean], "\nIgnored=\n", d)
-                else:
-                    areadict[ean] = d
-            except:
-                LogStream.logProblem("Problem with ", ean, LogStream.exc())
+        mapEntry = mapIter.next()
+        mapname = str(mapEntry.getKey())
+        attList = mapEntry.getValue()
+        attIter = attList.iterator()
+        while attIter.hasNext():
+            att = attIter.next()
+            ean = str(att.get("editarea"))
+            if len(ean):
+                try:
+                    d = {}
+                    if att.containsKey('zone') and att.containsKey('state'):
+                        d['ugcCode'] = str(att.get('state')) + "Z" + str(att.get('zone'))
+                    elif att.containsKey('id'):
+                        d['ugcCode'] = str(att.get('id'))
+                    elif att.containsKey('fips') and att.containsKey('state') and \
+                      att.containsKey('countyname'):
+                        d['ugcCode'] = str(att.get('state')) + "C" + str(att.get('fips'))[-3:]
+                        d['ugcName'] = string.strip(str(att.get('countyname')))
+                    else:
+                        continue
+
+                    if att.containsKey('state'):
+                        d["stateAbbr"] = str(att.get('state'))
+
+                    if att.containsKey('name'):
+                        d["ugcName"] = string.strip(str(att.get('name')))
+
+                    if att.containsKey('time_zone'):
+                        tzvalue = getRealTimeZone(str(att.get('time_zone')))
+                        if tzvalue is not None:
+                            d["ugcTimeZone"] = tzvalue
+
+                    if zonedata.has_key(d['ugcCode']):
+                        cityDict = zonedata[d['ugcCode']]
+                    elif fipsdata.has_key(d['ugcCode']):
+                        cityDict = fipsdata[d['ugcCode']]
+                    else:
+                        cityDict = None
+
+                    if cityDict:
+                        cityString = makeCityString(cityDict)
+                        if cityString is not None:
+                            cityString, locs = cityString
+                            if len(cityString):
+                                d["ugcCityString"] = cityString
+                                CityLocationDict[ean] = locs
+
+                    # partOfState codes
+                    if zonedata.has_key(d['ugcCode']):
+                        if zonedata[d['ugcCode']].has_key('partOfState'):
+                            d["partOfState"] = \
+                              zonedata[d['ugcCode']]['partOfState']
+                    elif fipsdata.has_key(d['ugcCode']):
+                        if fipsdata[d['ugcCode']].has_key('partOfState'):
+                            d["partOfState"] = \
+                              fipsdata[d['ugcCode']]['partOfState']
+
+                    # full state name
+                    if zonedata.has_key(d['ugcCode']):
+                        if zonedata[d['ugcCode']].has_key('fullStateName'):
+                            d["fullStateName"] = \
+                              zonedata[d['ugcCode']]['fullStateName']
+                    elif fipsdata.has_key(d['ugcCode']):
+                        if fipsdata[d['ugcCode']].has_key('fullStateName'):
+                            d["fullStateName"] = \
+                              fipsdata[d['ugcCode']]['fullStateName']
+                    else:
+                        marineState = checkMarineState(d['ugcCode'])
+                        if marineState is not None:
+                            d['fullStateName'] = marineState
+
+                    if areadict.has_key(ean) and d != areadict[ean]:
+                        LogStream.logDiag("Mismatch of definitions in " +\
+                          "AreaDictionary creation. EditAreaName=", ean,
+                          "AreaDict=\n", areadict[ean], "\nIgnored=\n", d)
+                    else:
+                        areadict[ean] = d
+                except:
+                    LogStream.logProblem("Problem with ", ean, LogStream.exc())

     s = """
 # ----------------------------------------------------------------------------
 File diff suppressed because one or more lines are too long
@@ -18,29 +18,34 @@
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
-"""Generate site specific text products.
-
-This script is run at install time to customize a set of the text products
-for a given site.
-
-SOFTWARE HISTORY
-Date         Ticket#    Engineer    Description
------------- ---------- ----------- --------------------------
-Jun 23, 2008 1180       jelkins     Initial creation
-Jul 08, 2008 1222       jelkins     Modified for use within Java
-Jul 09, 2008 1222       jelkins     Split command line loader from class
-Jul 24, 2012 #944       dgilling    Refactored to support separate
-                                    generation of products and utilities.
-Sep 07, 2012 #1150      dgilling    Ensure all necessary dirs get created.
-May 12, 2014 2536       bclement    renamed text plugin to include uf in name
-
-@author: jelkins
-"""
+#
+# Generate site specific text products.
+#
+# This script is run at install time to customize a set of the text products
+# for a given site.
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Jun 23, 2008 1180       jelkins     Initial creation
+# Jul 08, 2008 1222       jelkins     Modified for use within Java
+# Jul 09, 2008 1222       jelkins     Split command line loader from class
+# Jul 24, 2012 #944       dgilling    Refactored to support separate
+#                                     generation of products and utilities.
+# Sep 07, 2012 #1150      dgilling    Ensure all necessary dirs get created.
+# May 12, 2014 2536       bclement    renamed text plugin to include uf in name
+# Oct 20, 2014 #3685      randerso    Changed how SiteInfo is loaded.
+#                                     Fixed logging to log to a file
+#                                     Cleaned up how protected file updates are returned
+#
+# @author: jelkins
+#
+##
 __version__ = "1.0"

 import errno
 import os
+import JUtil
 from os.path import basename
 from os.path import join
 from os.path import dirname
@@ -64,7 +69,6 @@ from sys import path
 path.append(join(LIBRARY_DIR,"../"))
 path.append(join(PREFERENCE_DIR,"../"))

-from library.SiteInfo import SiteInfo as SITE_INFO
 from preferences.configureTextProducts import NWSProducts as NWS_PRODUCTS

 from os.path import basename
@@ -73,12 +77,21 @@ from os.path import abspath
 from os.path import join

 # ---- Setup Logging ----------------------------------------------------------
-LOG_CONF = join(SCRIPT_DIR,"preferences","logging.conf")
+import logging
+from time import strftime, gmtime
+timeStamp = strftime("%Y%m%d", gmtime())
+logFile = '/awips2/edex/logs/configureTextProducts-'+timeStamp+'.log'

-import logging.config
-logging.config.fileConfig(LOG_CONF)
+LOG = logging.getLogger("configureTextProducts")
+LOG.setLevel(logging.DEBUG)
+handler = logging.FileHandler(logFile)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter("%(levelname)-5s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s")
+handler.setFormatter(formatter)
+for h in LOG.handlers:
+    LOG.removeHandler(h)
+LOG.addHandler(handler)

-LOG = logging.getLogger("Generator")

 # List of protected files
 fileList = []
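The Python change replaces the logging.conf-driven configuration with an explicit, date-stamped file handler. For comparison, a sketch of the equivalent setup with java.util.logging; the log directory here is a placeholder rather than the real /awips2/edex/logs path:

    import java.io.IOException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.logging.FileHandler;
    import java.util.logging.Level;
    import java.util.logging.Logger;
    import java.util.logging.SimpleFormatter;

    public class FileLoggerSketch {
        public static void main(String[] args) throws IOException {
            String timeStamp = new SimpleDateFormat("yyyyMMdd").format(new Date());
            // Placeholder path for the sketch.
            String logFile = "/tmp/configureTextProducts-" + timeStamp + ".log";

            Logger log = Logger.getLogger("configureTextProducts");
            log.setLevel(Level.ALL);

            FileHandler handler = new FileHandler(logFile, true);
            handler.setLevel(Level.ALL);
            handler.setFormatter(new SimpleFormatter());
            log.addHandler(handler);

            log.info("Configuration of Text Products Start");
        }
    }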
@@ -96,6 +109,17 @@ ProcessDirectories = [
     },
 ]

+# This will "load" SiteInfo in a more complicated way
+# than 'from SiteCFG import SiteInfo'.
+from LockingFile import File
+
+pathManager = PathManagerFactory.getPathManager()
+lf = pathManager.getStaticLocalizationFile(LocalizationType.COMMON_STATIC, "python/gfe/SiteCFG.py")
+with File(lf.getFile(), lf.getName(), 'r') as file:
+    fileContents = file.read()
+
+exec fileContents
+
+
 class Generator():
     """Generates site specific text products from base template files.
@@ -118,7 +142,7 @@ class Generator():

         @raise LookupError: when the site ID is invalid
         """
-        if siteId in SITE_INFO.keys():
+        if siteId in SiteInfo.keys():
             self.__siteId = siteId
         else:
             raise LookupError, ' unknown WFO: ' + siteId
@ -179,6 +203,8 @@ class Generator():
|
||||||
created += self.__create(dirInfo['src'], dirInfo['dest'])
|
created += self.__create(dirInfo['src'], dirInfo['dest'])
|
||||||
LOG.info("%d text products created" % created)
|
LOG.info("%d text products created" % created)
|
||||||
LOG.debug("Configuration of Text Products Finish")
|
LOG.debug("Configuration of Text Products Finish")
|
||||||
|
|
||||||
|
return JUtil.pylistToJavaStringList(self.getProtectedFiles())
|
||||||
|
|
||||||
def delete(self):
|
def delete(self):
|
||||||
"""Delete text products"""
|
"""Delete text products"""
|
||||||
|
@ -216,11 +242,11 @@ class Generator():
|
||||||
|
|
||||||
LOG.debug("PIL Information for all sites Begin.......")
|
LOG.debug("PIL Information for all sites Begin.......")
|
||||||
|
|
||||||
for site in SITE_INFO.keys():
|
for site in SiteInfo.keys():
|
||||||
LOG.info("--------------------------------------------")
|
LOG.info("--------------------------------------------")
|
||||||
LOG.info("%s %s %s" % (site,
|
LOG.info("%s %s %s" % (site,
|
||||||
SITE_INFO[site]['fullStationID'],
|
SiteInfo[site]['fullStationID'],
|
||||||
SITE_INFO[site]['wfoCityState']))
|
SiteInfo[site]['wfoCityState']))
|
||||||
pils = self.__createPilDictionary(site)
|
pils = self.__createPilDictionary(site)
|
||||||
self.__printPilDictionary(pils)
|
self.__printPilDictionary(pils)
|
||||||
found += len(pils)
|
found += len(pils)
|
||||||
|
@ -303,11 +329,11 @@ class Generator():
|
||||||
|
|
||||||
subDict = {}
|
subDict = {}
|
||||||
subDict['<site>'] = siteid.strip()
|
subDict['<site>'] = siteid.strip()
|
||||||
subDict['<region>'] = SITE_INFO[siteid]['region'].strip()
|
subDict['<region>'] = SiteInfo[siteid]['region'].strip()
|
||||||
subDict['<wfoCityState>'] = SITE_INFO[siteid]['wfoCityState'].strip()
|
subDict['<wfoCityState>'] = SiteInfo[siteid]['wfoCityState'].strip()
|
||||||
subDict['<wfoCity>'] = SITE_INFO[siteid]['wfoCity'].strip()
|
subDict['<wfoCity>'] = SiteInfo[siteid]['wfoCity'].strip()
|
||||||
subDict['<fullStationID>'] = SITE_INFO[siteid]['fullStationID'].strip()
|
subDict['<fullStationID>'] = SiteInfo[siteid]['fullStationID'].strip()
|
||||||
subDict['<state>'] = SITE_INFO[siteid]['state'].strip()
|
subDict['<state>'] = SiteInfo[siteid]['state'].strip()
|
||||||
if product is not None:
|
if product is not None:
|
||||||
subDict['<product>'] = product.strip()
|
subDict['<product>'] = product.strip()
|
||||||
if ProductToStandardMapping.has_key(product):
|
if ProductToStandardMapping.has_key(product):
|
||||||
|
@ -342,7 +368,7 @@ class Generator():
|
||||||
|
|
||||||
subDict = {}
|
subDict = {}
|
||||||
subDict['Site'] = siteid.strip()
|
subDict['Site'] = siteid.strip()
|
||||||
subDict['Region'] = SITE_INFO[siteid]['region'].strip()
|
subDict['Region'] = SiteInfo[siteid]['region'].strip()
|
||||||
if product is not None:
|
if product is not None:
|
||||||
subDict['Product'] = product.strip()
|
subDict['Product'] = product.strip()
|
||||||
if pilInfo is not None and pilInfo.has_key("pil") and multiPilFlag:
|
if pilInfo is not None and pilInfo.has_key("pil") and multiPilFlag:
|
||||||
|
@ -378,10 +404,10 @@ class Generator():
|
||||||
LOG.info("%s %s" % (p,pillist[p]))
|
LOG.info("%s %s" % (p,pillist[p]))
|
||||||
|
|
||||||
def __createPilDictionary(self, siteid):
|
def __createPilDictionary(self, siteid):
|
||||||
"""Update the SITE_INFO with a PIL dictionary
|
"""Update the SiteInfo with a PIL dictionary
|
||||||
|
|
||||||
Read the a2a data from the database, create PIL information, and add the information
|
Read the a2a data from the database, create PIL information, and add the information
|
||||||
to the SITE_INFO dictionary.
|
to the SiteInfo dictionary.
|
||||||
|
|
||||||
@param site: the site for which PIL information is created
|
@param site: the site for which PIL information is created
|
||||||
@type site: string
|
@type site: string
|
||||||
|
@ -390,7 +416,7 @@ class Generator():
|
||||||
@rtype: dictionary
|
@rtype: dictionary
|
||||||
"""
|
"""
|
||||||
|
|
||||||
siteD = SITE_INFO[siteid]
|
siteD = SiteInfo[siteid]
|
||||||
stationID4 = siteD['fullStationID']
|
stationID4 = siteD['fullStationID']
|
||||||
|
|
||||||
from com.raytheon.uf.edex.plugin.text.dao import AfosToAwipsDao
|
from com.raytheon.uf.edex.plugin.text.dao import AfosToAwipsDao
|
||||||
|
@ -435,7 +461,7 @@ class Generator():
|
||||||
e['textdbPil'] = pil
|
e['textdbPil'] = pil
|
||||||
e['awipsWANPil'] = site4 + pil[3:]
|
e['awipsWANPil'] = site4 + pil[3:]
|
||||||
d.append(e)
|
d.append(e)
|
||||||
siteD[nnn] = d #store the pil dictionary back into the SITE_INFO
|
siteD[nnn] = d #store the pil dictionary back into the SiteInfo
|
||||||
|
|
||||||
return pillist
|
return pillist
|
||||||
|
|
||||||
|
@ -572,8 +598,8 @@ class Generator():
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# extract out the pil information from the dictionary
|
# extract out the pil information from the dictionary
|
||||||
if SITE_INFO[siteid].has_key(pilNames[0]):
|
if SiteInfo[siteid].has_key(pilNames[0]):
|
||||||
pils = SITE_INFO[siteid][pilNames[0]]
|
pils = SiteInfo[siteid][pilNames[0]]
|
||||||
else:
|
else:
|
||||||
#set pils to empty list if none defined
|
#set pils to empty list if none defined
|
||||||
pils = [{'awipsWANPil': 'kssscccnnn',
|
pils = [{'awipsWANPil': 'kssscccnnn',
|
||||||
|
@ -728,4 +754,7 @@ class Generator():
|
||||||
LOG.debug(" Deleting Existing Baseline Templates Finished........")
|
LOG.debug(" Deleting Existing Baseline Templates Finished........")
|
||||||
|
|
||||||
return productsRemoved
|
return productsRemoved
|
||||||
|
|
||||||
|
def runFromJava(siteId, destinationDir):
|
||||||
|
generator = Generator()
|
||||||
|
return generator.create(siteId, destinationDir)
|
|
@@ -1,922 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#Contains information about products, regions, etc. for each site
-#in the country.
-
-#region= two-letter regional identifier, mainly used for installation of
-#        text product templates
-SiteInfo= {
-  'ABQ': {'region': 'SR', 'fullStationID': 'KABQ', 'wfoCityState': 'ALBUQUERQUE NM', 'wfoCity': 'ALBUQUERQUE', 'state': 'NEW MEXICO'},
-  'ABR': {'region': 'CR', 'fullStationID': 'KABR', 'wfoCityState': 'ABERDEEN SD', 'wfoCity': 'ABERDEEN', 'state': 'SOUTH DAKOTA'},
-  'AER': {'region': 'AR', 'fullStationID': 'PAFC', 'wfoCityState': 'ANCHORAGE AK', 'wfoCity': 'ANCHORAGE', 'state': 'ALASKA'},
-  'AFC': {'region': 'AR', 'fullStationID': 'PAFC', 'wfoCityState': 'ANCHORAGE AK', 'wfoCity': 'ANCHORAGE', 'state': 'ALASKA'},
-  'AFG': {'region': 'AR', 'fullStationID': 'PAFG', 'wfoCityState': 'FAIRBANKS AK', 'wfoCity': 'FAIRBANKS', 'state': 'ALASKA'},
-  'AJK': {'region': 'AR', 'fullStationID': 'PAJK', 'wfoCityState': 'JUNEAU AK', 'wfoCity': 'JUNEAU', 'state': 'ALASKA'},
-  'AKQ': {'region': 'ER', 'fullStationID': 'KAKQ', 'wfoCityState': 'WAKEFIELD VA', 'wfoCity': 'WAKEFIELD', 'state': 'VIRGINIA'},
-  'ALU': {'region': 'AR', 'fullStationID': 'PAFC', 'wfoCityState': 'ANCHORAGE AK', 'wfoCity': 'ANCHORAGE', 'state': 'ALASKA'},
-  'ALY': {'region': 'ER', 'fullStationID': 'KALY', 'wfoCityState': 'ALBANY NY', 'wfoCity': 'ALBANY', 'state': 'NEW YORK'},
-  'AMA': {'region': 'SR', 'fullStationID': 'KAMA', 'wfoCityState': 'AMARILLO TX', 'wfoCity': 'AMARILLO', 'state': 'TEXAS'},
-  'APX': {'region': 'CR', 'fullStationID': 'KAPX', 'wfoCityState': 'GAYLORD MI', 'wfoCity': 'GAYLORD', 'state': 'MICHIGAN'},
-  'ARX': {'region': 'CR', 'fullStationID': 'KARX', 'wfoCityState': 'LA CROSSE WI', 'wfoCity': 'LA CROSSE', 'state': 'WISCONSIN'},
-  'BGM': {'region': 'ER', 'fullStationID': 'KBGM', 'wfoCityState': 'BINGHAMTON NY', 'wfoCity': 'BINGHAMTON', 'state': 'NEW YORK'},
-  'BIS': {'region': 'CR', 'fullStationID': 'KBIS', 'wfoCityState': 'BISMARCK ND', 'wfoCity': 'BISMARCK', 'state': 'NORTH DAKOTA'},
-  'BMX': {'region': 'SR', 'fullStationID': 'KBMX', 'wfoCityState': 'BIRMINGHAM AL', 'wfoCity': 'BIRMINGHAM', 'state': 'ALABAMA'},
-  'BOI': {'region': 'WR', 'fullStationID': 'KBOI', 'wfoCityState': 'BOISE ID', 'wfoCity': 'BOISE', 'state': 'IDAHO'},
-  'BOU': {'region': 'CR', 'fullStationID': 'KBOU', 'wfoCityState': 'DENVER CO', 'wfoCity': 'DENVER', 'state': 'COLORADO'},
-  'BOX': {'region': 'ER', 'fullStationID': 'KBOX', 'wfoCityState': 'TAUNTON MA', 'wfoCity': 'TAUNTON', 'state': 'MASSACHUSETTS'},
-  'BRO': {'region': 'SR', 'fullStationID': 'KBRO', 'wfoCityState': 'BROWNSVILLE TX', 'wfoCity': 'BROWNSVILLE', 'state': 'TEXAS'},
-  'BTV': {'region': 'ER', 'fullStationID': 'KBTV', 'wfoCityState': 'BURLINGTON VT', 'wfoCity': 'BURLINGTON', 'state': 'VERMONT'},
-  'BUF': {'region': 'ER', 'fullStationID': 'KBUF', 'wfoCityState': 'BUFFALO NY', 'wfoCity': 'BUFFALO', 'state': 'NEW YORK'},
-  'BYZ': {'region': 'WR', 'fullStationID': 'KBYZ', 'wfoCityState': 'BILLINGS MT', 'wfoCity': 'BILLINGS', 'state': 'MONTANA'},
-  'CAE': {'region': 'ER', 'fullStationID': 'KCAE', 'wfoCityState': 'COLUMBIA SC', 'wfoCity': 'COLUMBIA', 'state': 'SOUTH CAROLINA'},
-  'CAR': {'region': 'ER', 'fullStationID': 'KCAR', 'wfoCityState': 'CARIBOU ME', 'wfoCity': 'CARIBOU', 'state': 'MAINE'},
-  'CHS': {'region': 'ER', 'fullStationID': 'KCHS', 'wfoCityState': 'CHARLESTON SC', 'wfoCity': 'CHARLESTON', 'state': 'SOUTH CAROLINA'},
-  'CLE': {'region': 'ER', 'fullStationID': 'KCLE', 'wfoCityState': 'CLEVELAND OH', 'wfoCity': 'CLEVELAND', 'state': 'OHIO'},
-  'CRP': {'region': 'SR', 'fullStationID': 'KCRP', 'wfoCityState': 'CORPUS CHRISTI TX', 'wfoCity': 'CORPUS CHRISTI', 'state': 'TEXAS'},
-  'CTP': {'region': 'ER', 'fullStationID': 'KCTP', 'wfoCityState': 'STATE COLLEGE PA', 'wfoCity': 'STATE COLLEGE', 'state': 'PENNSYLVANIA'},
-  'CYS': {'region': 'CR', 'fullStationID': 'KCYS', 'wfoCityState': 'CHEYENNE WY', 'wfoCity': 'CHEYENNE', 'state': 'WYOMING'},
-  'DDC': {'region': 'CR', 'fullStationID': 'KDDC', 'wfoCityState': 'DODGE CITY KS', 'wfoCity': 'DODGE CITY', 'state': 'KANSAS'},
-  'DLH': {'region': 'CR', 'fullStationID': 'KDLH', 'wfoCityState': 'DULUTH MN', 'wfoCity': 'DULUTH', 'state': 'MINNESOTA'},
-  'DMX': {'region': 'CR', 'fullStationID': 'KDMX', 'wfoCityState': 'DES MOINES IA', 'wfoCity': 'DES MOINES', 'state': 'IOWA'},
-  'DTX': {'region': 'CR', 'fullStationID': 'KDTX', 'wfoCityState': 'DETROIT/PONTIAC MI', 'wfoCity': 'DETROIT/PONTIAC', 'state': 'MICHIGAN'},
-  'DVN': {'region': 'CR', 'fullStationID': 'KDVN', 'wfoCityState': 'QUAD CITIES IA IL', 'wfoCity': 'QUAD CITIES', 'state': 'ILLINOIS'},
-  'EAX': {'region': 'CR', 'fullStationID': 'KEAX', 'wfoCityState': 'KANSAS CITY/PLEASANT HILL MO', 'wfoCity': 'KANSAS CITY/PLEASANT HILL', 'state': 'MISSOURI'},
-  'EKA': {'region': 'WR', 'fullStationID': 'KEKA', 'wfoCityState': 'EUREKA CA', 'wfoCity': 'EUREKA', 'state': 'CALIFORNIA'},
-  'EPZ': {'region': 'SR', 'fullStationID': 'KEPZ', 'wfoCityState': 'EL PASO TX/SANTA TERESA NM', 'wfoCity': 'EL PASO TX/SANTA TERESA', 'state': 'NEW MEXICO'},
-  'EWX': {'region': 'SR', 'fullStationID': 'KEWX', 'wfoCityState': 'AUSTIN/SAN ANTONIO TX', 'wfoCity': 'AUSTIN/SAN ANTONIO', 'state': 'TEXAS'},
-  'FFC': {'region': 'SR', 'fullStationID': 'KFFC', 'wfoCityState': 'PEACHTREE CITY GA', 'wfoCity': 'PEACHTREE CITY', 'state': 'GEORGIA'},
-  'FGF': {'region': 'CR', 'fullStationID': 'KFGF', 'wfoCityState': 'GRAND FORKS ND', 'wfoCity': 'GRAND FORKS', 'state': 'NORTH DAKOTA'},
-  'FGZ': {'region': 'WR', 'fullStationID': 'KFGZ', 'wfoCityState': 'FLAGSTAFF AZ', 'wfoCity': 'FLAGSTAFF', 'state': 'ARIZONA'},
-  'FSD': {'region': 'CR', 'fullStationID': 'KFSD', 'wfoCityState': 'SIOUX FALLS SD', 'wfoCity': 'SIOUX FALLS', 'state': 'SOUTH DAKOTA'},
-  'FWD': {'region': 'SR', 'fullStationID': 'KFWD', 'wfoCityState': 'FORT WORTH TX', 'wfoCity': 'FORT WORTH', 'state': 'TEXAS'},
-  'GGW': {'region': 'WR', 'fullStationID': 'KGGW', 'wfoCityState': 'GLASGOW MT', 'wfoCity': 'GLASGOW', 'state': 'MONTANA'},
-  'GID': {'region': 'CR', 'fullStationID': 'KGID', 'wfoCityState': 'HASTINGS NE', 'wfoCity': 'HASTINGS', 'state': 'NEBRASKA'},
-  'GJT': {'region': 'CR', 'fullStationID': 'KGJT', 'wfoCityState': 'GRAND JUNCTION CO', 'wfoCity': 'GRAND JUNCTION', 'state': 'COLORADO'},
-  'GLD': {'region': 'CR', 'fullStationID': 'KGLD', 'wfoCityState': 'GOODLAND KS', 'wfoCity': 'GOODLAND', 'state': 'KANSAS'},
-  'GRB': {'region': 'CR', 'fullStationID': 'KGRB', 'wfoCityState': 'GREEN BAY WI', 'wfoCity': 'GREEN BAY', 'state': 'WISCONSIN'},
-  'GRR': {'region': 'CR', 'fullStationID': 'KGRR', 'wfoCityState': 'GRAND RAPIDS MI', 'wfoCity': 'GRAND RAPIDS', 'state': 'MICHIGAN'},
-  'GSP': {'region': 'ER', 'fullStationID': 'KGSP', 'wfoCityState': 'GREENVILLE-SPARTANBURG SC', 'wfoCity': 'GREENVILLE-SPARTANBURG', 'state': 'SOUTH CAROLINA'},
-  'GUM': {'region': 'PR', 'fullStationID': 'PGUM', 'wfoCityState': 'TIYAN GU', 'wfoCity': 'TIYAN', 'state': 'GUAM'},
-  'GYX': {'region': 'ER', 'fullStationID': 'KGYX', 'wfoCityState': 'GRAY ME', 'wfoCity': 'GRAY', 'state': 'MAINE'},
-  'HFO': {'region': 'PR', 'fullStationID': 'PHFO', 'wfoCityState': 'HONOLULU HI', 'wfoCity': 'HONOLULU', 'state': 'HAWAII'},
-  'HGX': {'region': 'SR', 'fullStationID': 'KHGX', 'wfoCityState': 'HOUSTON/GALVESTON TX', 'wfoCity': 'HOUSTON/GALVESTON', 'state': 'TEXAS'},
-  'HNX': {'region': 'WR', 'fullStationID': 'KHNX', 'wfoCityState': 'HANFORD CA', 'wfoCity': 'HANFORD', 'state': 'CALIFORNIA'},
-  'HUN': {'region': 'SR', 'fullStationID': 'KHUN', 'wfoCityState': 'HUNTSVILLE AL', 'wfoCity': 'HUNTSVILLE', 'state': 'ALABAMA'},
-  'ICT': {'region': 'CR', 'fullStationID': 'KICT', 'wfoCityState': 'WICHITA KS', 'wfoCity': 'WICHITA', 'state': 'KANSAS'},
-  'ILM': {'region': 'ER', 'fullStationID': 'KILM', 'wfoCityState': 'WILMINGTON NC', 'wfoCity': 'WILMINGTON', 'state': 'NORTH CAROLINA'},
-  'ILN': {'region': 'ER', 'fullStationID': 'KILN', 'wfoCityState': 'WILMINGTON OH', 'wfoCity': 'WILMINGTON', 'state': 'OHIO'},
-  'ILX': {'region': 'CR', 'fullStationID': 'KILX', 'wfoCityState': 'LINCOLN IL', 'wfoCity': 'LINCOLN', 'state': 'ILLINOIS'},
-  'IND': {'region': 'CR', 'fullStationID': 'KIND', 'wfoCityState': 'INDIANAPOLIS IN', 'wfoCity': 'INDIANAPOLIS', 'state': 'INDIANA'},
-  'IWX': {'region': 'CR', 'fullStationID': 'KIWX', 'wfoCityState': 'NORTHERN INDIANA', 'wfoCity': 'NORTHERN INDIANA', 'state': 'INDIANA'},
-  'JAN': {'region': 'SR', 'fullStationID': 'KJAN', 'wfoCityState': 'JACKSON MS', 'wfoCity': 'JACKSON', 'state': 'MISSISSIPPI'},
-  'JAX': {'region': 'SR', 'fullStationID': 'KJAX', 'wfoCityState': 'JACKSONVILLE FL', 'wfoCity': 'JACKSONVILLE', 'state': 'FLORIDA'},
-  'JKL': {'region': 'CR', 'fullStationID': 'KJKL', 'wfoCityState': 'JACKSON KY', 'wfoCity': 'JACKSON', 'state': 'KENTUCKY'},
-  'KEY': {'region': 'SR', 'fullStationID': 'KKEY', 'wfoCityState': 'KEY WEST FL', 'wfoCity': 'KEY WEST', 'state': 'FLORIDA'},
-  'LBF': {'region': 'CR', 'fullStationID': 'KLBF', 'wfoCityState': 'NORTH PLATTE NE', 'wfoCity': 'NORTH PLATTE', 'state': 'NEBRASKA'},
-  'LCH': {'region': 'SR', 'fullStationID': 'KLCH', 'wfoCityState': 'LAKE CHARLES LA', 'wfoCity': 'LAKE CHARLES', 'state': 'LOUISIANA'},
-  'LIX': {'region': 'SR', 'fullStationID': 'KLIX', 'wfoCityState': 'NEW ORLEANS LA', 'wfoCity': 'NEW ORLEANS', 'state': 'LOUISIANA'},
-  'LKN': {'region': 'WR', 'fullStationID': 'KLKN', 'wfoCityState': 'ELKO NV', 'wfoCity': 'ELKO', 'state': 'NEVADA'},
-  'LMK': {'region': 'CR', 'fullStationID': 'KLMK', 'wfoCityState': 'LOUISVILLE KY', 'wfoCity': 'LOUISVILLE', 'state': 'KENTUCKY'},
-  'LOT': {'region': 'CR', 'fullStationID': 'KLOT', 'wfoCityState': 'CHICAGO IL', 'wfoCity': 'CHICAGO', 'state': 'ILLINOIS'},
-  'LOX': {'region': 'WR', 'fullStationID': 'KLOX', 'wfoCityState': 'LOS ANGELES/OXNARD CA', 'wfoCity': 'LOS ANGELES/OXNARD', 'state': 'CALIFORNIA'},
-  'LSX': {'region': 'CR', 'fullStationID': 'KLSX', 'wfoCityState': 'ST LOUIS MO', 'wfoCity': 'ST LOUIS', 'state': 'MISSOURI'},
-  'LUB': {'region': 'SR', 'fullStationID': 'KLUB', 'wfoCityState': 'LUBBOCK TX', 'wfoCity': 'LUBBOCK', 'state': 'TEXAS'},
-  'LWX': {'region': 'ER', 'fullStationID': 'KLWX', 'wfoCityState': 'BALTIMORE MD/WASHINGTON DC', 'wfoCity': 'BALTIMORE MD/WASHINGTON', 'state': 'WASHINGTON DC'},
-  'LZK': {'region': 'SR', 'fullStationID': 'KLZK', 'wfoCityState': 'LITTLE ROCK AR', 'wfoCity': 'LITTLE ROCK', 'state': 'ARKANSAS'},
-  'MAF': {'region': 'SR', 'fullStationID': 'KMAF', 'wfoCityState': 'MIDLAND/ODESSA TX', 'wfoCity': 'MIDLAND/ODESSA', 'state': 'TEXAS'},
-  'MEG': {'region': 'SR', 'fullStationID': 'KMEG', 'wfoCityState': 'MEMPHIS TN', 'wfoCity': 'MEMPHIS', 'state': 'TENNESSEE'},
-  'MFL': {'region': 'SR', 'fullStationID': 'KMFL', 'wfoCityState': 'MIAMI FL', 'wfoCity': 'MIAMI', 'state': 'FLORIDA'},
-  'MFR': {'region': 'WR', 'fullStationID': 'KMFR', 'wfoCityState': 'MEDFORD OR', 'wfoCity': 'MEDFORD', 'state': 'OREGON'},
-  'MHX': {'region': 'ER', 'fullStationID': 'KMHX', 'wfoCityState': 'NEWPORT/MOREHEAD CITY NC', 'wfoCity': 'NEWPORT/MOREHEAD CITY', 'state': 'NORTH CAROLINA'},
-  'MKX': {'region': 'CR', 'fullStationID': 'KMKX', 'wfoCityState': 'MILWAUKEE/SULLIVAN WI', 'wfoCity': 'MILWAUKEE/SULLIVAN', 'state': 'WISCONSIN'},
-  'MLB': {'region': 'SR', 'fullStationID': 'KMLB', 'wfoCityState': 'MELBOURNE FL', 'wfoCity': 'MELBOURNE', 'state': 'FLORIDA'},
-  'MOB': {'region': 'SR', 'fullStationID': 'KMOB', 'wfoCityState': 'MOBILE AL', 'wfoCity': 'MOBILE', 'state': 'ALABAMA'},
-  'MPX': {'region': 'CR', 'fullStationID': 'KMPX', 'wfoCityState': 'TWIN CITIES/CHANHASSEN MN', 'wfoCity': 'TWIN CITIES/CHANHASSEN', 'state': 'MINNESOTA'},
-  'MQT': {'region': 'CR', 'fullStationID': 'KMQT', 'wfoCityState': 'MARQUETTE MI', 'wfoCity': 'MARQUETTE', 'state': 'MICHIGAN'},
-  'MRX': {'region': 'SR', 'fullStationID': 'KMRX', 'wfoCityState': 'MORRISTOWN TN', 'wfoCity': 'MORRISTOWN', 'state': 'TENNESSEE'},
-  'MSO': {'region': 'WR', 'fullStationID': 'KMSO', 'wfoCityState': 'MISSOULA MT', 'wfoCity': 'MISSOULA', 'state': 'MONTANA'},
-  'MTR': {'region': 'WR', 'fullStationID': 'KMTR', 'wfoCityState': 'SAN FRANCISCO CA', 'wfoCity': 'SAN FRANCISCO', 'state': 'CALIFORNIA'},
-  'NH1': {'region': 'NC', 'fullStationID': 'KNHC', 'wfoCityState': 'NATIONAL HURRICANE CENTER MIAMI FL', 'wfoCity': 'MIAMI', 'state': ''},
-  'NH2': {'region': 'NC', 'fullStationID': 'KNHC', 'wfoCityState': 'NATIONAL HURRICANE CENTER MIAMI FL', 'wfoCity': 'MIAMI', 'state': ''},
-  'OAX': {'region': 'CR', 'fullStationID': 'KOAX', 'wfoCityState': 'OMAHA/VALLEY NE', 'wfoCity': 'OMAHA/VALLEY', 'state': 'NEBRASKA'},
-  'OHX': {'region': 'SR', 'fullStationID': 'KOHX', 'wfoCityState': 'NASHVILLE TN', 'wfoCity': 'NASHVILLE', 'state': 'TENNESSEE'},
-  'OKX': {'region': 'ER', 'fullStationID': 'KOKX', 'wfoCityState': 'UPTON NY', 'wfoCity': 'UPTON', 'state': 'NEW YORK'},
-  'ONA': {'region': 'NC', 'fullStationID': 'KWBC', 'wfoCityState': 'OCEAN PREDICTION CENTER WASHINGTON DC', 'wfoCity': 'WASHINGTON DC', 'state': ''},
-  'ONP': {'region': 'NC', 'fullStationID': 'KWBC', 'wfoCityState': 'OCEAN PREDICTION CENTER WASHINGTON DC', 'wfoCity': 'WASHINGTON DC', 'state': ''},
-  'OTX': {'region': 'WR', 'fullStationID': 'KOTX', 'wfoCityState': 'SPOKANE WA', 'wfoCity': 'SPOKANE', 'state': 'WASHINGTON'},
-  'OUN': {'region': 'SR', 'fullStationID': 'KOUN', 'wfoCityState': 'NORMAN OK', 'wfoCity': 'NORMAN', 'state': 'OKLAHOMA'},
-  'PAH': {'region': 'CR', 'fullStationID': 'KPAH', 'wfoCityState': 'PADUCAH KY', 'wfoCity': 'PADUCAH', 'state': 'KENTUCKY'},
-  'PBZ': {'region': 'ER', 'fullStationID': 'KPBZ', 'wfoCityState': 'PITTSBURGH PA', 'wfoCity': 'PITTSBURGH', 'state': 'PENNSYLVANIA'},
-  'PDT': {'region': 'WR', 'fullStationID': 'KPDT', 'wfoCityState': 'PENDLETON OR', 'wfoCity': 'PENDLETON', 'state': 'OREGON'},
-  'PHI': {'region': 'ER', 'fullStationID': 'KPHI', 'wfoCityState': 'MOUNT HOLLY NJ', 'wfoCity': 'MOUNT HOLLY', 'state': 'NEW JERSEY'},
-  'PIH': {'region': 'WR', 'fullStationID': 'KPIH', 'wfoCityState': 'POCATELLO ID', 'wfoCity': 'POCATELLO', 'state': 'IDAHO'},
-  'PQR': {'region': 'WR', 'fullStationID': 'KPQR', 'wfoCityState': 'PORTLAND OR', 'wfoCity': 'PORTLAND', 'state': 'OREGON'},
-  'PSR': {'region': 'WR', 'fullStationID': 'KPSR', 'wfoCityState': 'PHOENIX AZ', 'wfoCity': 'PHOENIX', 'state': 'ARIZONA'},
-  'PUB': {'region': 'CR', 'fullStationID': 'KPUB', 'wfoCityState': 'PUEBLO CO', 'wfoCity': 'PUEBLO', 'state': 'COLORADO'},
-  'RAH': {'region': 'ER', 'fullStationID': 'KRAH', 'wfoCityState': 'RALEIGH NC', 'wfoCity': 'RALEIGH', 'state': 'NORTH CAROLINA'},
-  'REV': {'region': 'WR', 'fullStationID': 'KREV', 'wfoCityState': 'RENO NV', 'wfoCity': 'RENO', 'state': 'NEVADA'},
-  'RIW': {'region': 'CR', 'fullStationID': 'KRIW', 'wfoCityState': 'RIVERTON WY', 'wfoCity': 'RIVERTON', 'state': 'WYOMING'},
-  'RLX': {'region': 'ER', 'fullStationID': 'KRLX', 'wfoCityState': 'CHARLESTON WV', 'wfoCity': 'CHARLESTON', 'state': 'WEST VIRGINIA'},
-  'RNK': {'region': 'ER', 'fullStationID': 'KRNK', 'wfoCityState': 'BLACKSBURG VA', 'wfoCity': 'BLACKSBURG', 'state': 'VIRGINIA'},
-  'SEW': {'region': 'WR', 'fullStationID': 'KSEW', 'wfoCityState': 'SEATTLE WA', 'wfoCity': 'SEATTLE', 'state': 'WASHINGTON'},
-  'SGF': {'region': 'CR', 'fullStationID': 'KSGF', 'wfoCityState': 'SPRINGFIELD MO', 'wfoCity': 'SPRINGFIELD', 'state': 'MISSOURI'},
-  'SGX': {'region': 'WR', 'fullStationID': 'KSGX', 'wfoCityState': 'SAN DIEGO CA', 'wfoCity': 'SAN DIEGO', 'state': 'CALIFORNIA'},
-  'SHV': {'region': 'SR', 'fullStationID': 'KSHV', 'wfoCityState': 'SHREVEPORT LA', 'wfoCity': 'SHREVEPORT', 'state': 'LOUISIANA'},
-  'SJT': {'region': 'SR', 'fullStationID': 'KSJT', 'wfoCityState': 'SAN ANGELO TX', 'wfoCity': 'SAN ANGELO', 'state': 'TEXAS'},
-  'SJU': {'region': 'SR', 'fullStationID': 'TJSJ', 'wfoCityState': 'SAN JUAN PR', 'wfoCity': 'SAN JUAN', 'state': 'PUERTO RICO'},
-  'SLC': {'region': 'WR', 'fullStationID': 'KSLC', 'wfoCityState': 'SALT LAKE CITY UT', 'wfoCity': 'SALT LAKE CITY', 'state': 'UTAH'},
-  'STO': {'region': 'WR', 'fullStationID': 'KSTO', 'wfoCityState': 'SACRAMENTO CA', 'wfoCity': 'SACRAMENTO', 'state': 'CALIFORNIA'},
-  'TAE': {'region': 'SR', 'fullStationID': 'KTAE', 'wfoCityState': 'TALLAHASSEE FL', 'wfoCity': 'TALLAHASSEE', 'state': 'FLORIDA'},
-  'TBW': {'region': 'SR', 'fullStationID': 'KTBW', 'wfoCityState': 'TAMPA BAY RUSKIN FL', 'wfoCity': 'TAMPA BAY RUSKIN', 'state': 'FLORIDA'},
-  'TFX': {'region': 'WR', 'fullStationID': 'KTFX', 'wfoCityState': 'GREAT FALLS MT', 'wfoCity': 'GREAT FALLS', 'state': 'MONTANA'},
-  'TOP': {'region': 'CR', 'fullStationID': 'KTOP', 'wfoCityState': 'TOPEKA KS', 'wfoCity': 'TOPEKA', 'state': 'KANSAS'},
-  'TSA': {'region': 'SR', 'fullStationID': 'KTSA', 'wfoCityState': 'TULSA OK', 'wfoCity': 'TULSA', 'state': 'OKLAHOMA'},
-  'TWC': {'region': 'WR', 'fullStationID': 'KTWC', 'wfoCityState': 'TUCSON AZ', 'wfoCity': 'TUCSON', 'state': 'ARIZONA'},
-  'UNR': {'region': 'CR', 'fullStationID': 'KUNR', 'wfoCityState': 'RAPID CITY SD', 'wfoCity': 'RAPID CITY', 'state': 'SOUTH DAKOTA'},
-  'VEF': {'region': 'WR', 'fullStationID': 'KVEF', 'wfoCityState': 'LAS VEGAS NV', 'wfoCity': 'LAS VEGAS', 'state': 'NEVADA'},
-  }
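The removed preferences/logging.conf shown below was consumed by the old code with logging.config.fileConfig(LOG_CONF). A minimal sketch of that older, file-driven setup (the relative path here is an assumption for illustration):

import logging
import logging.config

LOG_CONF = "preferences/logging.conf"   # assumed location of a file like the one below
logging.config.fileConfig(LOG_CONF)

LOG = logging.getLogger("Generator")
LOG.info("configured from %s", LOG_CONF)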
@@ -1,90 +0,0 @@
-; Logging Configuration
-
-; To enable file logging and modify the log format see the appropriate sections
-; below.
-;
-; For a more detailed description of this configuration format see the logging
-; module documentation in the Python Library Reference 14.5.10.2
-
-; ---- Section Declarations ---------------------------------------------------
-
-[loggers]
-keys=root
-
-[handlers]
-keys=console,file
-
-[formatters]
-keys=console,file
-
-; ---- Loggers ----------------------------------------------------------------
-
-[logger_root]
-level=DEBUG
-handlers=console,file
-
-; ---- Handlers ---------------------------------------------------------------
-
-[handler_console]
-class=StreamHandler
-level=INFO
-formatter=console
-args=(sys.stdout,)
-
-[handler_file]
-class=StreamHandler
-formatter=console
-level=CRITICAL
-args=(sys.stderr,)
-
-; ---- Enable File Logging ----------------------------------------------------
-;
-; Uncomment the following lines to enable file logging.  The previous lines can
-; remain uncommented as the following will simply override the values.
-;
-; args   replace 'program.log' with desired filename
-;        replace 'w' with 'a' to append to the log file
-
-;class=FileHandler
-;level=DEBUG
-;formatter=file
-;args=('program.log','a')
-
-; ---- Formatters -------------------------------------------------------------
-
-; Configure the format of the console and file log
-;
-; %(name)s            Name of the logger (logging channel).
-; %(levelno)s         Numeric logging level for the message
-;                     (DEBUG, INFO, WARNING, ERROR, CRITICAL).
-; %(levelname)s       Text logging level for the message
-;                     ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL').
-; %(pathname)s        Full pathname of the source file where the logging call was
-;                     issued (if available).
-; %(filename)s        Filename portion of pathname.
-; %(module)s          Module (name portion of filename).
-; %(funcName)s        Name of function containing the logging call.
-; %(lineno)d          Source line number where the logging call was issued
-;                     (if available).
-; %(created)f         Time when the LogRecord was created
-;                     (as returned by time.time()).
-; %(relativeCreated)d
-;                     Time in milliseconds when the LogRecord was created,
-;                     relative to the time the logging module was loaded.
-; %(asctime)s         Human-readable time when the LogRecord was created. By default
-;                     this is of the form ``2003-07-08 16:49:45,896'' (the numbers
-;                     after the comma are millisecond portion of the time).
-; %(msecs)d           Millisecond portion of the time when the LogRecord was created.
-; %(thread)d          Thread ID (if available).
-; %(threadName)s
-;                     Thread name (if available).
-; %(process)d         Process ID (if available).
-; %(message)s         The logged message, computed as msg % args.
-
-[formatter_file]
-format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
-datefmt=
-
-[formatter_console]
-format=%(name)s - %(levelname)s - %(message)s
-datefmt=
@@ -17,6 +17,14 @@
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Changed to support mixed case
+#
+##
+
 #-------------------------------------------------------------------------
 # File Name: AFD.py
 # Description: This product creates a Area Forecast Discussion product.
@@ -245,7 +253,7 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 ],
 
 "popStartZ_AM": 12, #hour UTC
-"WWA_Nil" : "NONE.",
+"WWA_Nil" : "None.",
 
 "hazardSamplingThreshold": (10, None), #(%cov, #points)
 }
@@ -621,12 +629,13 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 
 productName = self.checkTestMode(argDict, self._productName)
 
-fcst = fcst + self._pil + "\n\n"
-fcst = fcst + productName + "\n"
-fcst = fcst + "NATIONAL WEATHER SERVICE "
-fcst = fcst + self._wfoCityState +"\n"
-fcst = fcst + issuedByString
-fcst = fcst + self._timeLabel + "\n\n"
+s = self._pil + "\n\n" + \
+    productName + "\n" + \
+    "NATIONAL WEATHER SERVICE " + \
+    self._wfoCityState +"\n" + \
+    issuedByString + \
+    self._timeLabel + "\n\n"
+fcst = fcst + s.upper()
 return fcst
 
 ####################################################################
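The refactor above builds the product header in a temporary string s and upper-cases only that header before appending it, leaving the rest of the forecast in mixed case. An illustrative sketch of the pattern (all values below are placeholders):

def make_header(pil, product_name, wfo_city_state, issued_by, time_label):
    # Assemble the header once, then force it to upper case; the body text
    # appended afterwards keeps whatever case it was written in.
    s = pil + "\n\n" + \
        product_name + "\n" + \
        "NATIONAL WEATHER SERVICE " + wfo_city_state + "\n" + \
        issued_by + time_label + "\n\n"
    return s.upper()

fcst = make_header("AFDXYZ", "Area Forecast Discussion", "Anytown XX",
                   "", "500 AM EDT Mon Oct 20 2014")
fcst += "Mixed case discussion text follows."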
@@ -776,7 +785,7 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 # If no hazards are found, append the null phrase
 
 if len(stateHazardList) == 0:
-    fcst = fcst + "NONE.\n"
+    fcst = fcst + self._WWA_Nil + "\n"
     continue
 
 # If hazards are found, then build the hazard phrases
@@ -822,7 +831,7 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 idString = self.makeUGCString(ids)
 
 # hazard phrase
-phrase = hazName + ' ' + timing + ' FOR ' + idString + '.'
+phrase = hazName + ' ' + timing + ' for ' + idString + '.'
 
 # Indent if there is a state list associated
 if len(self._state_IDs) > 1:
@@ -27,6 +27,15 @@
 #
 # Author: davis
 # ----------------------------------------------------------------------------
+##
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Changed to support mixed case
+#
+##
+
 #-------------------------------------------------------------------------
 # Example Output:
 # Refer to the NWS 10-518 Directive for further information.
@@ -111,7 +120,6 @@ class TextProduct(CivilEmerg.TextProduct):
 return fcst
 
 def _postProcessProduct(self, fcst, argDict):
-    fcst = string.upper(fcst)
     fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"])
     self.setProgressPercentage(100)
     self.progressMessage(0, 100, self._displayName + " Complete")
@@ -27,6 +27,15 @@
 #
 # Author: Matt Davis
 # ----------------------------------------------------------------------------
+##
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Changed to support mixed case
+#
+##
+
 #-------------------------------------------------------------------------
 # Example Output:
 # Refer to the NWS 10-922 Directive for further information.
@@ -119,10 +128,10 @@ class TextProduct(GenericReport.TextProduct):
 
 issuedByString = self.getIssuedByString()
 productName = self.checkTestMode(argDict, self._productName)
-fcst = fcst + productName + "\n" + \
+s = productName + "\n" + \
     "NATIONAL WEATHER SERVICE " + self._wfoCityState + \
     "\n" + issuedByString + self._timeLabel + "\n\n"
-fcst = string.upper(fcst)
+fcst = fcst + s.upper()
 return fcst
 
 def _makeProduct(self, fcst, editArea, areaLabel, argDict):
@@ -132,8 +141,6 @@ class TextProduct(GenericReport.TextProduct):
 return fcst
 
 def _postProcessProduct(self, fcst, argDict):
-    fcst = string.upper(fcst)
-
     #
     # Clean up multiple line feeds
     #
@@ -1,19 +1,19 @@
 ##
 # This software was developed and / or modified by Raytheon Company,
 # pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 #
 # U.S. EXPORT CONTROLLED TECHNICAL DATA
 # This software product contains export-restricted data whose
 # export/transfer/disclosure is restricted by U.S. law. Dissemination
 # to non-U.S. persons whether in the United States or abroad requires
 # an export license or other authorization.
 #
 # Contractor Name:        Raytheon Company
 # Contractor Address:     6825 Pine Street, Suite 340
 #                         Mail Stop B8
 #                         Omaha, NE 68106
 #                         402.291.0100
 #
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
@@ -505,7 +505,8 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis,
 hazNameA = self.hazardName(eachHazard['hdln'], argDict, True)
 hazName = self.hazardName(eachHazard['hdln'], argDict, False)
 
-if hazName == "WINTER WEATHER ADVISORY" or hazName == "WINTER STORM WARNING":
+# if hazName == "WINTER WEATHER ADVISORY" or hazName == "WINTER STORM WARNING":
+if hazName in ["WINTER WEATHER ADVISORY", "WINTER STORM WARNING", "BEACH HAZARDS STATEMENT"]:
     forPhrase = " FOR |* ENTER HAZARD TYPE *|"
 else:
     forPhrase =""
@@ -735,8 +736,12 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis,
 segmentTextSplit[1] = PRECAUTION + segmentTextSplit2[1]
 segmentText = string.join(segmentTextSplit,"")
 
-if removeBulletList != []:
+if keepBulletList == []:
+    segmentText = "\n\n|* WRAP-UP TEXT GOES HERE *|.\n"
+elif removeBulletList != []:
     segmentText = "|*\n" + segmentText + "*|"
+else:
+    segmentText = segmentText
 
 #
 # If segment passes the above checks, add the text
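The new branch above falls back to a wrap-up placeholder when no bullets survive, frames the segment for forecaster review when some bullets were removed, and otherwise leaves the text untouched. A hedged sketch of that three-way decision with simplified names:

def frame_segment(segment_text, keep_bullets, removed_bullets):
    if not keep_bullets:
        # Nothing worth keeping: hand the forecaster a wrap-up placeholder.
        return "\n\n|* WRAP-UP TEXT GOES HERE *|.\n"
    elif removed_bullets:
        # Some bullets were dropped: frame the whole segment for review.
        return "|*\n" + segment_text + "*|"
    else:
        return segment_text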
@@ -105,6 +105,13 @@
 # Example Output:
 # Refer to the NWS C11 and 10-503 Directives for Public Weather Services.
 #-------------------------------------------------------------------------
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Removed upper case conversions
+#
+##
 
 import TextRules
 import SampleAnalysis
@@ -200,7 +207,6 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict)
 fraction = fractionOne
 fcst = self._postProcessProduct(fcst, argDict)
-fcst = string.upper(fcst)
 return fcst
 
 def _getVariables(self, argDict):
@@ -252,13 +258,13 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 issuedByString = self.getIssuedByString()
 productName = self.checkTestMode(argDict, productName)
 
-fcst = fcst + self._wmoID + " " + self._fullStationID + " " + \
+s = self._wmoID + " " + self._fullStationID + " " + \
     self._ddhhmmTime + "\n" + self._pil + "\n\n" +\
     productName + "\n" +\
     "NATIONAL WEATHER SERVICE " + self._wfoCityState + \
     "\n" + issuedByString + self._timeLabel + "\n\n"
 
-fcst = string.upper(fcst)
+fcst = fcst + s.upper()
 return fcst
 
 def _preProcessArea(self, fcst, editArea, areaLabel, argDict):
@@ -278,7 +284,6 @@ class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
 return fcst + "\n\n$$\n"
 
 def _postProcessProduct(self, fcst, argDict):
-    fcst = string.upper(fcst)
     self.setProgressPercentage(100)
     self.progressMessage(0, 100, self._displayName + " Complete")
     return fcst
@@ -20,8 +20,13 @@
 ########################################################################
 # Hazard_AQA.py
 #
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Changed to support mixed case
+#
 ##
-##########################################################################
 import GenericHazards
 import string, time, re, os, types, copy
 import ProcessVariableList
@@ -157,10 +162,10 @@ class TextProduct(GenericHazards.TextProduct):
 
 # Placeholder for Agency Names to be filled in in _postProcessProduct
 #fcst = fcst + "@AGENCYNAMES" + "\n"
-fcst = fcst + "RELAYED BY NATIONAL WEATHER SERVICE " + self._wfoCityState + "\n" +\
+s = "RELAYED BY NATIONAL WEATHER SERVICE " + self._wfoCityState + "\n" +\
     issuedByString + self._timeLabel + "\n\n"
 
-fcst = string.upper(fcst)
+fcst = fcst + s.upper()
 return fcst
 
 def headlinesTiming(self, tree, node, key, timeRange, areaLabel, issuanceTime):
@@ -301,8 +306,6 @@ class TextProduct(GenericHazards.TextProduct):
 
 fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL)
 fcst = fixMultiLF.sub(r'\1', fcst)
-## # Keep body in lower case, if desired
-## fcst = string.upper(fcst)
 self.setProgressPercentage(100)
 self.progressMessage(0, 100, self._displayName + " Complete")
 return fcst
@@ -104,7 +104,7 @@ class TextProduct(GenericHazards.TextProduct):
 ('RP.S', allActions, 'RipCurrent'),  # HIGH RIP CURRENT RISK
 ]
 
 def _bulletDict(self):
     return {
         "CF" : ("COASTAL FLOODING,TIMING,IMPACTS"),     ### coastal flood warning, advisory, watch
         "LS" : ("LAKE SHORE FLOODING,TIMING,IMPACTS"),  ### lake shore flood warning, advisory, watch
@@ -22,6 +22,15 @@
 #
 ##
 ##########################################################################
+##
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Changed to support mixed case
+#
+##
+
 import GenericHazards
 import string, time, re, os, types, copy, sets
 import ModuleAccessor, LogStream
@@ -108,12 +117,12 @@ class TextProduct(GenericHazards.TextProduct):
 productName = self.checkTestMode(argDict,
     self._productName + watchPhrase)
 
-fcst = fcst + self._wmoID + " " + self._fullStationID + " " + \
+s = self._wmoID + " " + self._fullStationID + " " + \
     self._ddhhmmTime + "\n" + self._pil + "\n\n" +\
     productName + "\n" +\
     "NATIONAL WEATHER SERVICE " + self._wfoCityState + \
     "\n" + issuedByString + self._timeLabel + "\n" + self._easPhrase + "\n"
-fcst = string.upper(fcst)
+fcst = fcst + s.upper()
 return fcst
 
 
@@ -27,6 +27,14 @@
 #
 # Author: Matt Davis
 # ----------------------------------------------------------------------------
+##
+#
+# SOFTWARE HISTORY
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# Oct 20, 2014  #3685     randerso    Changed to support mixed case
+#
+##
 
 
 import GenericReport
@@ -67,7 +75,7 @@ class TextProduct(GenericReport.TextProduct):
 "language": "english",
 "lineLength": 66, #Maximum line length
 "includeCities" : 0, # Cities included in area header
-"cityDescriptor" : "INCLUDING THE CITIES OF",
+"cityDescriptor" : "Including the cities of",
 "includeZoneNames" : 0, # Zone names will be included in the area header
 "includeIssueTime" : 0, # This should be set to zero
 "singleComboOnly" : 1, # Used for non-segmented products
@@ -108,20 +116,18 @@ class TextProduct(GenericReport.TextProduct):
 
 issuedByString = self.getIssuedByString()
 productName = self.checkTestMode(argDict, self._productName)
-fcst = fcst + productName + "\n" + \
+s = productName + "\n" + \
     "NATIONAL WEATHER SERVICE " + self._wfoCityState + \
     "\n" + issuedByString + self._timeLabel + "\n\n"
-fcst = string.upper(fcst)
+fcst = fcst + s.upper()
 return fcst
 
 def _makeProduct(self, fcst, editArea, areaLabel, argDict):
     fcst = fcst + "...PUBLIC INFORMATION STATEMENT...\n\n"
-    fcst = fcst + "|* INFORMATION GOES HERE *|\n\n"
+    fcst = fcst + "|* Information goes here *|\n\n"
     return fcst
 
 def _postProcessProduct(self, fcst, argDict):
-    fcst = string.upper(fcst)
-
     #
     # Clean up multiple line feeds
     #
@ -27,6 +27,15 @@
|
||||||
#
|
#
|
||||||
# Author: Matt Davis
|
# Author: Matt Davis
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
|
##
|
||||||
|
#
|
||||||
|
# SOFTWARE HISTORY
|
||||||
|
# Date Ticket# Engineer Description
|
||||||
|
# ------------ ---------- ----------- --------------------------
|
||||||
|
# Oct 20, 2014 #3685 randerso Changed to support mixed case
|
||||||
|
#
|
||||||
|
##
|
||||||
|
|
||||||
#-------------------------------------------------------------------------
|
#-------------------------------------------------------------------------
|
||||||
# Example Output:
|
# Example Output:
|
||||||
# Refer to the NWS 10-518 Directive for further information.
|
# Refer to the NWS 10-518 Directive for further information.
|
||||||
|
@ -111,10 +120,10 @@ class TextProduct(GenericReport.TextProduct):
|
||||||
|
|
||||||
issuedByString = self.getIssuedByString()
|
issuedByString = self.getIssuedByString()
|
||||||
productName = self.checkTestMode(argDict, self._productName)
|
productName = self.checkTestMode(argDict, self._productName)
|
||||||
fcst = fcst + productName + "\n" + \
|
s = productName + "\n" + \
|
||||||
"NATIONAL WEATHER SERVICE " + self._wfoCityState + \
|
"NATIONAL WEATHER SERVICE " + self._wfoCityState + \
|
||||||
"\n" + issuedByString + self._timeLabel + "\n\n"
|
"\n" + issuedByString + self._timeLabel + "\n\n"
|
||||||
fcst = string.upper(fcst)
|
fcst = fcst + s.upper()
|
||||||
return fcst
|
return fcst
|
||||||
|
|
||||||
def _makeProduct(self, fcst, editArea, areaLabel, argDict):
|
def _makeProduct(self, fcst, editArea, areaLabel, argDict):
|
||||||
|
@ -123,8 +132,6 @@ class TextProduct(GenericReport.TextProduct):
|
||||||
return fcst
|
return fcst
|
||||||
|
|
||||||
def _postProcessProduct(self, fcst, argDict):
|
def _postProcessProduct(self, fcst, argDict):
|
||||||
fcst = string.upper(fcst)
|
|
||||||
|
|
||||||
#
|
#
|
||||||
# Clean up multiple line feeds
|
# Clean up multiple line feeds
|
||||||
#
|
#
|
||||||
|
|
|
@ -24,26 +24,26 @@
|
||||||
222:222:Reflectivity at -20C:dBZ:ReflectivityM20C
|
222:222:Reflectivity at -20C:dBZ:ReflectivityM20C
|
||||||
223:223:Low-Level Rotation Tracks 0-2km AGL (60 min. accum.):1/s:RotationTrackLL60min
|
223:223:Low-Level Rotation Tracks 0-2km AGL (60 min. accum.):1/s:RotationTrackLL60min
|
||||||
224:224:Low-Level Rotation Tracks 0-2km AGL (1440 min. accum.):1/s:RotationTrackLL1440min
|
224:224:Low-Level Rotation Tracks 0-2km AGL (1440 min. accum.):1/s:RotationTrackLL1440min
|
||||||
225:225:Mid-Level Rotation Tracks 0-3km AGL (60 min. accum.):1/s:RotationTrackML60min
|
225:225:Mid-Level Rotation Tracks 3-6km AGL (60 min. accum.):1/s:RotationTrackML60min
|
||||||
226:226:Mid-Level Rotation Tracks 0-3km AGL (1440 min. accum.):1/s:RotationTrackML1440min
|
226:226:Mid-Level Rotation Tracks 3-6km AGL (1440 min. accum.):1/s:RotationTrackML1440min
|
||||||
227:227:Surface Precipitation Type (SPT)::PrecipType
|
227:227:Surface Precipitation Type (SPT)::PrecipType
|
||||||
228:228:Radar Precipitation Rate (SPR):mm/hr:PrecipRate
|
228:228:Radar Precipitation Rate (SPR):mm/hr:PrecipRate
|
||||||
229:229:Seamless Hybrid Scan Reflectivity (SHSR):dBZ:SeamlessHSR
|
229:229:Seamless Hybrid Scan Reflectivity (SHSR):dBZ:SeamlessHSR
|
||||||
230:230:Radar Quality Index (RQI)::RadarQualityIndex
|
230:230:Radar Quality Index (RQI)::RadarQualityIndex
|
||||||
231:231:QPE - Radar Only (1 hr. accum):mm:RadarOnlyQPE01H
|
231:231:QPE - Radar Only (1 hr. accum.):mm:RadarOnlyQPE01H
|
||||||
232:232:QPE - Radar Only (3 hr. accum.):RadarOnlyQPE03H
|
232:232:QPE - Radar Only (3 hr. accum.):RadarOnlyQPE03H
|
||||||
233:233:QPE - Radar Only (6 hr. accum.):mm:RadarOnlyQPE06H
|
233:233:QPE - Radar Only (6 hr. accum.):mm:RadarOnlyQPE06H
|
||||||
234:234:QPE - Radar Only (12 hr. accum.):mm:RadarOnlyQPE12H
|
234:234:QPE - Radar Only (12 hr. accum.):mm:RadarOnlyQPE12H
|
||||||
235:235:QPE - Radar Only (24 hr. accum.):mm:RadarOnlyQPE24H
|
235:235:QPE - Radar Only (24 hr. accum.):mm:RadarOnlyQPE24H
|
||||||
236:236:QPE - Radar Only (48 hr. accum.):mm:RadarOnlyQPE48H
|
236:236:QPE - Radar Only (48 hr. accum.):mm:RadarOnlyQPE48H
|
||||||
237:237:QPE - Radar Only (72 hr. accum.):mm:RadarOnlyQPE72H
|
237:237:QPE - Radar Only (72 hr. accum.):mm:RadarOnlyQPE72H
|
||||||
238:238:QPE - Radar w/ Gauge Bias Correction (1 hr. accum.):mm:GaugeCorrQPE01H
|
238:238:QPE - Radar with Gauge Bias Correction (1 hr. accum.):mm:GaugeCorrQPE01H
|
||||||
239:239:QPE - Radar w/ Gauge Bias Correction (3 hr. accum.):mm:GaugeCorrQPE03H
|
239:239:QPE - Radar with Gauge Bias Correction (3 hr. accum.):mm:GaugeCorrQPE03H
|
||||||
240:240:QPE - Radar w/ Gauge Bias Correction (6 hr. accum.):mm:GaugeCorrQPE06H
|
240:240:QPE - Radar with Gauge Bias Correction (6 hr. accum.):mm:GaugeCorrQPE06H
|
||||||
241:241:QPE - Radar w/ Gauge Bias Correction (12 hr. accum.):mm:GaugeCorrQPE12H
|
241:241:QPE - Radar with Gauge Bias Correction (12 hr. accum.):mm:GaugeCorrQPE12H
|
||||||
242:242:QPE - Radar w/ Gauge Bias Correction (24 hr. accum.):mm:GaugeCorrQPE24H
|
242:242:QPE - Radar with Gauge Bias Correction (24 hr. accum.):mm:GaugeCorrQPE24H
|
||||||
243:243:QPE - Radar w/ Gauge Bias Correction (48 hr. accum.):mm:GaugeCorrQPE48H
|
243:243:QPE - Radar with Gauge Bias Correction (48 hr. accum.):mm:GaugeCorrQPE48H
|
||||||
244:244:QPE - Radar w/ Gauge Bias Correction (72 hr. accum.):mm:GaugeCorrQPE72H
|
244:244:QPE - Radar with Gauge Bias Correction (72 hr. accum.):mm:GaugeCorrQPE72H
|
||||||
245:245:QPE - Mountain Mapper (1 hr. accum.):mm:MountainMapperQPE01H
|
245:245:QPE - Mountain Mapper (1 hr. accum.):mm:MountainMapperQPE01H
|
||||||
246:246:QPE - Mountain Mapper (3 hr. accum.):mm:MountainMapperQPE03H
|
246:246:QPE - Mountain Mapper (3 hr. accum.):mm:MountainMapperQPE03H
|
||||||
247:247:QPE - Mountain Mapper (6 hr. accum.):mm:MountainMapperQPE06H
|
247:247:QPE - Mountain Mapper (6 hr. accum.):mm:MountainMapperQPE06H
|
||||||
|
|
|
@ -7,12 +7,12 @@
|
||||||
5:5:Low-Level Rotation Tracks 0-2km AGL (240 min. accum.):1/s:RotationTrackLL240min
|
5:5:Low-Level Rotation Tracks 0-2km AGL (240 min. accum.):1/s:RotationTrackLL240min
|
||||||
6:6:Low-Level Rotation Tracks 0-2km AGL (360 min. accum.):1/s:RotationTrackLL360min
|
6:6:Low-Level Rotation Tracks 0-2km AGL (360 min. accum.):1/s:RotationTrackLL360min
|
||||||
7:7:Low-Level Rotation Tracks 0-2km AGL (1440 min. accum.):1/s:RotationTrackLL1440min
|
7:7:Low-Level Rotation Tracks 0-2km AGL (1440 min. accum.):1/s:RotationTrackLL1440min
|
||||||
14:14:Mid-Level Rotation Tracks 0-3km AGL (30 min. accum.):1/s:RotationTrackML30min
|
14:14:Mid-Level Rotation Tracks 3-6km AGL (30 min. accum.):1/s:RotationTrackML30min
|
||||||
15:15:Mid-Level Rotation Tracks 0-3km AGL (60 min. accum.):1/s:RotationTrackML60min
|
15:15:Mid-Level Rotation Tracks 3-6km AGL (60 min. accum.):1/s:RotationTrackML60min
|
||||||
16:16:Mid-Level Rotation Tracks 0-3km AGL (120 min. accum.):1/s:RotationTrackML120min
|
16:16:Mid-Level Rotation Tracks 3-6km AGL (120 min. accum.):1/s:RotationTrackML120min
|
||||||
17:17:Mid-Level Rotation Tracks 0-3km AGL (240 min. accum.):1/s:RotationTrackML240min
|
17:17:Mid-Level Rotation Tracks 3-6km AGL (240 min. accum.):1/s:RotationTrackML240min
|
||||||
18:18:Mid-Level Rotation Tracks 0-3km AGL (360 min. accum.):1/s:RotationTrackML360min
|
18:18:Mid-Level Rotation Tracks 3-6km AGL (360 min. accum.):1/s:RotationTrackML360min
|
||||||
19:19:Mid-Level Rotation Tracks 0-3km AGL (1440 min. accum.):1/s:RotationTrackML1440min
|
19:19:Mid-Level Rotation Tracks 3-6km AGL (1440 min. accum.):1/s:RotationTrackML1440min
|
||||||
26:26:Severe Hail Index (SHI)::SHI
|
26:26:Severe Hail Index (SHI)::SHI
|
||||||
27:27:Probability of Severe Hail (POSH):%:POSH
|
27:27:Probability of Severe Hail (POSH):%:POSH
|
||||||
28:28:Maximum Estimated Size of Hail (MESH):mm:MESH
|
28:28:Maximum Estimated Size of Hail (MESH):mm:MESH
|
||||||
|
|
|
@ -1,20 +1,20 @@
|
||||||
#Product Discipline 209 - Multi-Radar/Multi-Sensor, Parameter Category 6: QPE Products
|
#Product Discipline 209 - Multi-Radar/Multi-Sensor, Parameter Category 6: QPE Products
|
||||||
0:0:Surface Precipitation Type (SPT)::PrecipType
|
0:0:Surface Precipitation Type (SPT)::PrecipType
|
||||||
1:1:Radar Precipitation Rate (SPR):mm/hr:PrecipRate
|
1:1:Radar Precipitation Rate (SPR):mm/hr:PrecipRate
|
||||||
2:2:QPE - Radar Only (1 hr. accum):mm:RadarOnlyQPE01H
|
2:2:QPE - Radar Only (1 hr. accum.):mm:RadarOnlyQPE01H
|
||||||
3:3:QPE - Radar Only (3 hr. accum.):RadarOnlyQPE03H
|
3:3:QPE - Radar Only (3 hr. accum.):RadarOnlyQPE03H
|
||||||
4:4:QPE - Radar Only (6 hr. accum.):mm:RadarOnlyQPE06H
|
4:4:QPE - Radar Only (6 hr. accum.):mm:RadarOnlyQPE06H
|
||||||
5:5:QPE - Radar Only (12 hr. accum.):mm:RadarOnlyQPE12H
|
5:5:QPE - Radar Only (12 hr. accum.):mm:RadarOnlyQPE12H
|
||||||
6:6:QPE - Radar Only (24 hr. accum.):mm:RadarOnlyQPE24H
|
6:6:QPE - Radar Only (24 hr. accum.):mm:RadarOnlyQPE24H
|
||||||
7:7:QPE - Radar Only (48 hr. accum.):mm:RadarOnlyQPE48H
|
7:7:QPE - Radar Only (48 hr. accum.):mm:RadarOnlyQPE48H
|
||||||
8:8:QPE - Radar Only (72 hr. accum.):mm:RadarOnlyQPE72H
|
8:8:QPE - Radar Only (72 hr. accum.):mm:RadarOnlyQPE72H
|
||||||
9:9:QPE - Radar w/ Gauge Bias Correction (1 hr. accum.):mm:GaugeCorrQPE01H
|
9:9:QPE - Radar with Gauge Bias Correction (1 hr. accum.):mm:GaugeCorrQPE01H
|
||||||
10:10:QPE - Radar w/ Gauge Bias Correction (3 hr. accum.):mm:GaugeCorrQPE03H
|
10:10:QPE - Radar with Gauge Bias Correction (3 hr. accum.):mm:GaugeCorrQPE03H
|
||||||
11:11:QPE - Radar w/ Gauge Bias Correction (6 hr. accum.):mm:GaugeCorrQPE06H
|
11:11:QPE - Radar with Gauge Bias Correction (6 hr. accum.):mm:GaugeCorrQPE06H
|
||||||
12:12:QPE - Radar w/ Gauge Bias Correction (12 hr. accum.):mm:GaugeCorrQPE12H
|
12:12:QPE - Radar with Gauge Bias Correction (12 hr. accum.):mm:GaugeCorrQPE12H
|
||||||
13:13:QPE - Radar w/ Gauge Bias Correction (24 hr. accum.):mm:GaugeCorrQPE24H
|
13:13:QPE - Radar with Gauge Bias Correction (24 hr. accum.):mm:GaugeCorrQPE24H
|
||||||
14:14:QPE - Radar w/ Gauge Bias Correction (48 hr. accum.):mm:GaugeCorrQPE48H
|
14:14:QPE - Radar with Gauge Bias Correction (48 hr. accum.):mm:GaugeCorrQPE48H
|
||||||
15:15:QPE - Radar w/ Gauge Bias Correction (72 hr. accum.):mm:GaugeCorrQPE72H
|
15:15:QPE - Radar with Gauge Bias Correction (72 hr. accum.):mm:GaugeCorrQPE72H
|
||||||
16:16:QPE - Radar Gauge Only (1 hr. accum.):mm:GaugeOnlyQPE01H
|
16:16:QPE - Radar Gauge Only (1 hr. accum.):mm:GaugeOnlyQPE01H
|
||||||
17:17:QPE - Radar Gauge Only (3 hr. accum.):mm:GaugeOnlyQPE03H
|
17:17:QPE - Radar Gauge Only (3 hr. accum.):mm:GaugeOnlyQPE03H
|
||||||
18:18:QPE - Radar Gauge Only (6 hr. accum.):mm:GaugeOnlyQPE06H
|
18:18:QPE - Radar Gauge Only (6 hr. accum.):mm:GaugeOnlyQPE06H
|
||||||
|
|
|
@ -47,6 +47,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
||||||
* ------------ ---------- ----------- --------------------------
|
* ------------ ---------- ----------- --------------------------
|
||||||
* 7/24/07 353 bphillip Initial Check in
|
* 7/24/07 353 bphillip Initial Check in
|
||||||
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
|
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
|
||||||
|
* 10/28/2014 3454 bphillip Fix usage of getSession()
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -170,7 +171,13 @@ public class RadarStationDao extends CoreDao {
|
||||||
}
|
}
|
||||||
crit.add(stationEq);
|
crit.add(stationEq);
|
||||||
Session session = getSession();
|
Session session = getSession();
|
||||||
return crit.getExecutableCriteria(session).list();
|
try {
|
||||||
|
return crit.getExecutableCriteria(session).list();
|
||||||
|
} finally {
|
||||||
|
if (session != null){
|
||||||
|
session.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
logger.warn("Cannot execute spatial query with less than 3 points");
|
logger.warn("Cannot execute spatial query with less than 3 points");
|
||||||
return new ArrayList<RadarStation>();
|
return new ArrayList<RadarStation>();
|
||||||
|
|
|
@ -19,6 +19,7 @@
|
||||||
**/
|
**/
|
||||||
package com.raytheon.uf.common.dataplugin.grid.util;
|
package com.raytheon.uf.common.dataplugin.grid.util;
|
||||||
|
|
||||||
|
import java.lang.ref.SoftReference;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
|
@ -32,11 +33,14 @@ import com.vividsolutions.jts.geom.Coordinate;
|
||||||
/**
|
/**
|
||||||
* A class for calculating and caching static data for grids.
|
* A class for calculating and caching static data for grids.
|
||||||
*
|
*
|
||||||
|
* Orignally ported from GridAccessor5.C
|
||||||
|
*
|
||||||
* <pre>
|
* <pre>
|
||||||
* SOFTWARE HISTORY
|
* SOFTWARE HISTORY
|
||||||
* Date Ticket# Engineer Description
|
* Date Ticket# Engineer Description
|
||||||
* ------------ ---------- ----------- --------------------------
|
* ------------ ---------- ----------- --------------------------
|
||||||
* Jul 24, 2008 brockwoo Initial creation
|
* Jul 24, 2008 brockwoo Initial creation
|
||||||
|
* Oct 21, 2014 3721 dlovely Optimized for reduced memory usage
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -45,7 +49,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
||||||
*/
|
*/
|
||||||
|
|
||||||
public class StaticGridData {
|
public class StaticGridData {
|
||||||
private static Map<GridCoverage, StaticGridData> instanceMap = new HashMap<GridCoverage, StaticGridData>();
|
private static Map<GridCoverage, SoftReference<StaticGridData>> instanceMap = new HashMap<GridCoverage, SoftReference<StaticGridData>>();
|
||||||
|
|
||||||
private static final double R_EARTH = 6370.0;
|
private static final double R_EARTH = 6370.0;
|
||||||
|
|
||||||
|
@ -61,11 +65,17 @@ public class StaticGridData {
|
||||||
|
|
||||||
public static synchronized StaticGridData getInstance(
|
public static synchronized StaticGridData getInstance(
|
||||||
GridCoverage gridCoverage) {
|
GridCoverage gridCoverage) {
|
||||||
StaticGridData rval = instanceMap.get(gridCoverage);
|
SoftReference<StaticGridData> data = instanceMap.get(gridCoverage);
|
||||||
|
|
||||||
if (rval == null) {
|
StaticGridData rval = null;
|
||||||
|
if (null != data) {
|
||||||
|
rval = data.get();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (null == data || null == rval) {
|
||||||
rval = new StaticGridData(gridCoverage);
|
rval = new StaticGridData(gridCoverage);
|
||||||
instanceMap.put(gridCoverage, rval);
|
data = new SoftReference<StaticGridData>(rval);
|
||||||
|
instanceMap.put(gridCoverage, data);
|
||||||
}
|
}
|
||||||
|
|
||||||
return rval;
|
return rval;
|
||||||
|
@ -83,6 +93,13 @@ public class StaticGridData {
|
||||||
return this.dy;
|
return this.dy;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initializes the Dx, Dy and Coriolis data from the provided
|
||||||
|
* {@link GridCoverage}.
|
||||||
|
*
|
||||||
|
* @param gridCoverage
|
||||||
|
* Grid Coverage.
|
||||||
|
*/
|
||||||
private void initStaticData(GridCoverage gridCoverage) {
|
private void initStaticData(GridCoverage gridCoverage) {
|
||||||
int nx = gridCoverage.getNx();
|
int nx = gridCoverage.getNx();
|
||||||
int ny = gridCoverage.getNy();
|
int ny = gridCoverage.getNy();
|
||||||
|
@ -91,52 +108,73 @@ public class StaticGridData {
|
||||||
float[] dxPtr = new float[n];
|
float[] dxPtr = new float[n];
|
||||||
float[] dyPtr = new float[n];
|
float[] dyPtr = new float[n];
|
||||||
float[] avgPtr = new float[n];
|
float[] avgPtr = new float[n];
|
||||||
double[] xx = new double[n];
|
float[] xxU = new float[nx];
|
||||||
double[] yy = new double[n];
|
float[] xxC = new float[nx];
|
||||||
double[] zz = new double[n];
|
float[] xxD = new float[nx];
|
||||||
|
float[] yyU = new float[nx];
|
||||||
|
float[] yyC = new float[nx];
|
||||||
|
float[] yyD = new float[nx];
|
||||||
|
float[] zzU = new float[nx];
|
||||||
|
float[] zzC = new float[nx];
|
||||||
|
float[] zzD = new float[nx];
|
||||||
|
|
||||||
|
float[] tmpXX, tmpYY, tmpZZ;
|
||||||
|
|
||||||
int i, j, k;
|
int i, j, k;
|
||||||
|
|
||||||
for (j = k = 0; j < ny; j++) {
|
// Populate Up rows.
|
||||||
for (i = 0; i < nx; i++, k++) {
|
for (i = 0; i < nx; i++) {
|
||||||
Coordinate location = new Coordinate(i, j);
|
Coordinate location = new Coordinate(i, 1);
|
||||||
Coordinate latLon = MapUtil.gridCoordinateToLatLon(location,
|
Coordinate latLon = MapUtil.gridCoordinateToLatLon(location,
|
||||||
PixelOrientation.CENTER, gridCoverage);
|
PixelOrientation.CENTER, gridCoverage);
|
||||||
latLon.x = Math.toRadians(latLon.x);
|
latLon.x = Math.toRadians(latLon.x);
|
||||||
latLon.y = Math.toRadians(latLon.y);
|
latLon.y = Math.toRadians(latLon.y);
|
||||||
xx[k] = Math.cos(latLon.y);
|
xxU[i] = (float) Math.cos(latLon.y);
|
||||||
yy[k] = xx[k] * Math.sin(latLon.x);
|
yyU[i] = (float) (xxU[i] * Math.sin(latLon.x));
|
||||||
xx[k] *= Math.cos(latLon.x);
|
xxU[i] *= Math.cos(latLon.x);
|
||||||
zz[k] = Math.sin(latLon.y);
|
zzU[i] = (float) Math.sin(latLon.y);
|
||||||
_coriolis[k] = (float) (zz[k] * 1.458e-4);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
this.coriolis = newRecord(_coriolis, nx, ny);
|
// Populate Current rows.
|
||||||
|
for (i = 0; i < nx; i++) {
|
||||||
|
Coordinate location = new Coordinate(i, 0);
|
||||||
|
Coordinate latLon = MapUtil.gridCoordinateToLatLon(location,
|
||||||
|
PixelOrientation.CENTER, gridCoverage);
|
||||||
|
latLon.x = Math.toRadians(latLon.x);
|
||||||
|
latLon.y = Math.toRadians(latLon.y);
|
||||||
|
xxC[i] = (float) Math.cos(latLon.y);
|
||||||
|
yyC[i] = (float) (xxC[i] * Math.sin(latLon.x));
|
||||||
|
xxC[i] *= Math.cos(latLon.x);
|
||||||
|
zzC[i] = (float) Math.sin(latLon.y);
|
||||||
|
}
|
||||||
|
|
||||||
int up, dn, lft, rgt;
|
// Init Down as a copy of Current
|
||||||
long _nxm = nx - 1;
|
System.arraycopy(xxC, 0, xxD, 0, nx);
|
||||||
|
System.arraycopy(yyC, 0, yyD, 0, nx);
|
||||||
|
System.arraycopy(zzC, 0, zzD, 0, nx);
|
||||||
|
|
||||||
|
int lft, rgt;
|
||||||
double d;
|
double d;
|
||||||
double icomp, jcomp, kcomp;
|
double icomp, jcomp, kcomp;
|
||||||
double dmax = 0.0;
|
double dmax = 0.0;
|
||||||
dn = 0;
|
|
||||||
up = nx;
|
|
||||||
for (j = k = 0; j < ny; j++) {
|
for (j = k = 0; j < ny; j++) {
|
||||||
if (up >= n) {
|
|
||||||
up -= nx;
|
lft = 0;
|
||||||
}
|
|
||||||
lft = k;
|
|
||||||
for (i = 0; i < nx; i++, k++) {
|
for (i = 0; i < nx; i++, k++) {
|
||||||
rgt = (i < _nxm ? k + 1 : k);
|
_coriolis[k] = (float) (zzC[i] * 1.458e-4);
|
||||||
icomp = yy[lft] * zz[rgt] - zz[lft] * yy[rgt];
|
rgt = (i < nx - 1 ? i + 1 : i);
|
||||||
jcomp = zz[lft] * xx[rgt] - xx[lft] * zz[rgt];
|
icomp = yyC[lft] * zzC[rgt] - zzC[lft] * yyC[rgt];
|
||||||
kcomp = xx[lft] * yy[rgt] - yy[lft] * xx[rgt];
|
jcomp = zzC[lft] * xxC[rgt] - xxC[lft] * zzC[rgt];
|
||||||
|
kcomp = xxC[lft] * yyC[rgt] - yyC[lft] * xxC[rgt];
|
||||||
d = Math.sqrt(icomp * icomp + jcomp * jcomp + kcomp * kcomp);
|
d = Math.sqrt(icomp * icomp + jcomp * jcomp + kcomp * kcomp);
|
||||||
dxPtr[k] = (float) (Math.asin(d) * 1000.0 * R_EARTH / (rgt - lft));
|
dxPtr[k] = (float) (Math.asin(d) * 1000.0 * R_EARTH / (rgt - lft));
|
||||||
icomp = yy[dn] * zz[up] - zz[dn] * yy[up];
|
icomp = yyD[i] * zzU[i] - zzD[i] * yyU[i];
|
||||||
jcomp = zz[dn] * xx[up] - xx[dn] * zz[up];
|
jcomp = zzD[i] * xxU[i] - xxD[i] * zzU[i];
|
||||||
kcomp = xx[dn] * yy[up] - yy[dn] * xx[up];
|
kcomp = xxD[i] * yyU[i] - yyD[i] * xxU[i];
|
||||||
d = Math.sqrt(icomp * icomp + jcomp * jcomp + kcomp * kcomp);
|
d = Math.sqrt(icomp * icomp + jcomp * jcomp + kcomp * kcomp);
|
||||||
dyPtr[k] = (float) (Math.asin(d) * 1000.0 * R_EARTH * nx / (up - dn));
|
dyPtr[k] = (float) (Math.asin(d) * 1000.0 * R_EARTH * (j == 0
|
||||||
|
|| j == (ny - 1) ? 1 : 0.5));
|
||||||
avgPtr[k] = (dxPtr[k] + dyPtr[k]) / 2.0f;
|
avgPtr[k] = (dxPtr[k] + dyPtr[k]) / 2.0f;
|
||||||
d = dxPtr[k] - dyPtr[k];
|
d = dxPtr[k] - dyPtr[k];
|
||||||
if (d < 0) {
|
if (d < 0) {
|
||||||
|
@ -146,14 +184,52 @@ public class StaticGridData {
|
||||||
if (d > dmax) {
|
if (d > dmax) {
|
||||||
dmax = d;
|
dmax = d;
|
||||||
}
|
}
|
||||||
dn++;
|
if (i != 0) {
|
||||||
up++;
|
lft++;
|
||||||
lft = k;
|
}
|
||||||
}
|
}
|
||||||
if (j == 0) {
|
|
||||||
dn = 0;
|
// Move Current to Down and Up to Current.
|
||||||
|
tmpXX = xxD;
|
||||||
|
xxD = xxC;
|
||||||
|
xxC = xxU;
|
||||||
|
|
||||||
|
tmpYY = yyD;
|
||||||
|
yyD = yyC;
|
||||||
|
yyC = yyU;
|
||||||
|
|
||||||
|
tmpZZ = zzD;
|
||||||
|
zzD = zzC;
|
||||||
|
zzC = zzU;
|
||||||
|
|
||||||
|
// Construct the next Up row with new data unless this is the last
|
||||||
|
// pass then duplicate the current row.
|
||||||
|
if (j < ny - 2) {
|
||||||
|
// Populate the next Up row.
|
||||||
|
xxU = tmpXX;
|
||||||
|
yyU = tmpYY;
|
||||||
|
zzU = tmpZZ;
|
||||||
|
for (i = 0; i < nx; i++) {
|
||||||
|
Coordinate location = new Coordinate(i, j + 2);
|
||||||
|
Coordinate latLon = MapUtil.gridCoordinateToLatLon(
|
||||||
|
location, PixelOrientation.CENTER, gridCoverage);
|
||||||
|
latLon.x = Math.toRadians(latLon.x);
|
||||||
|
latLon.y = Math.toRadians(latLon.y);
|
||||||
|
xxU[i] = (float) Math.cos(latLon.y);
|
||||||
|
yyU[i] = (float) (xxU[i] * Math.sin(latLon.x));
|
||||||
|
xxU[i] *= Math.cos(latLon.x);
|
||||||
|
zzU[i] = (float) Math.sin(latLon.y);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// If the last run, Duplicate the Current row to the Up row.
|
||||||
|
xxU = xxC;
|
||||||
|
yyU = yyC;
|
||||||
|
zzU = zzC;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.coriolis = newRecord(_coriolis, nx, ny);
|
||||||
|
|
||||||
if (dmax > 0.01) {
|
if (dmax > 0.01) {
|
||||||
this.dx = newRecord(dxPtr, nx, ny);
|
this.dx = newRecord(dxPtr, nx, ny);
|
||||||
this.dy = newRecord(dyPtr, nx, ny);
|
this.dy = newRecord(dyPtr, nx, ny);
|
||||||
|
|
|
@ -69,6 +69,8 @@ import com.raytheon.uf.common.time.DataTime;
|
||||||
* Mar 13, 2014 2907 njensen split edex.redbook plugin into common and
|
* Mar 13, 2014 2907 njensen split edex.redbook plugin into common and
|
||||||
* edex redbook plugins
|
* edex redbook plugins
|
||||||
* Oct 10, 2014 3720 mapeters Removed dataURI column.
|
* Oct 10, 2014 3720 mapeters Removed dataURI column.
|
||||||
|
* Oct 28, 2014 3720 mapeters Added refTime and forecastTime to unique
|
||||||
|
* constraints.
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -78,8 +80,8 @@ import com.raytheon.uf.common.time.DataTime;
|
||||||
@Entity
|
@Entity
|
||||||
@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "redbookseq")
|
@SequenceGenerator(initialValue = 1, name = PluginDataObject.ID_GEN, sequenceName = "redbookseq")
|
||||||
@Table(name = "redbook", uniqueConstraints = { @UniqueConstraint(columnNames = {
|
@Table(name = "redbook", uniqueConstraints = { @UniqueConstraint(columnNames = {
|
||||||
"wmoTTAAii", "corIndicator", "fcstHours", "productId", "fileId",
|
"refTime", "forecastTime", "wmoTTAAii", "corIndicator", "fcstHours",
|
||||||
"originatorId" }) })
|
"productId", "fileId", "originatorId" }) })
|
||||||
/*
|
/*
|
||||||
* Both refTime and forecastTime are included in the refTimeIndex since
|
* Both refTime and forecastTime are included in the refTimeIndex since
|
||||||
* forecastTime is unlikely to be used.
|
* forecastTime is unlikely to be used.
|
||||||
|
|
|
@ -0,0 +1,154 @@
|
||||||
|
/**
|
||||||
|
* This software was developed and / or modified by Raytheon Company,
|
||||||
|
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||||
|
*
|
||||||
|
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||||
|
* This software product contains export-restricted data whose
|
||||||
|
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||||
|
* to non-U.S. persons whether in the United States or abroad requires
|
||||||
|
* an export license or other authorization.
|
||||||
|
*
|
||||||
|
* Contractor Name: Raytheon Company
|
||||||
|
* Contractor Address: 6825 Pine Street, Suite 340
|
||||||
|
* Mail Stop B8
|
||||||
|
* Omaha, NE 68106
|
||||||
|
* 402.291.0100
|
||||||
|
*
|
||||||
|
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||||
|
* further licensing information.
|
||||||
|
**/
|
||||||
|
package com.raytheon.uf.common.dataplugin.text.db;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStreamReader;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
import com.raytheon.uf.common.localization.FileUpdatedMessage;
|
||||||
|
import com.raytheon.uf.common.localization.ILocalizationFileObserver;
|
||||||
|
import com.raytheon.uf.common.localization.IPathManager;
|
||||||
|
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
|
||||||
|
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
|
||||||
|
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||||
|
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||||
|
import com.raytheon.uf.common.localization.exception.LocalizationException;
|
||||||
|
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||||
|
import com.raytheon.uf.common.status.UFStatus;
|
||||||
|
import com.raytheon.uf.common.util.FileUtil;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Utilities to support mixed case product generation on a per Product ID (nnn)
|
||||||
|
* basis
|
||||||
|
*
|
||||||
|
* <pre>
|
||||||
|
*
|
||||||
|
* SOFTWARE HISTORY
|
||||||
|
*
|
||||||
|
* Date Ticket# Engineer Description
|
||||||
|
* ------------ ---------- ----------- --------------------------
|
||||||
|
* Oct 1, 2014 #3685 randerso Initial creation
|
||||||
|
*
|
||||||
|
* </pre>
|
||||||
|
*
|
||||||
|
* @author randerso
|
||||||
|
* @version 1.0
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class MixedCaseProductSupport {
|
||||||
|
private static final IUFStatusHandler statusHandler = UFStatus
|
||||||
|
.getHandler(MixedCaseProductSupport.class);
|
||||||
|
|
||||||
|
private static final String MIXED_CASE_DIR = "mixedCase";
|
||||||
|
|
||||||
|
private static final String MIXED_CASE_PIDS_FILE = FileUtil.join(
|
||||||
|
MIXED_CASE_DIR, "mixedCaseProductIds.txt");
|
||||||
|
|
||||||
|
private static final char COMMENT_DELIMITER = '#';
|
||||||
|
|
||||||
|
private static Set<String> mixedCasePids;
|
||||||
|
|
||||||
|
private static IPathManager pm = PathManagerFactory.getPathManager();
|
||||||
|
|
||||||
|
private static LocalizationFile baseDir;
|
||||||
|
|
||||||
|
public static Set<String> getMixedCasePids() {
|
||||||
|
// setup up the file updated observer
|
||||||
|
synchronized (MixedCaseProductSupport.class) {
|
||||||
|
if (baseDir == null) {
|
||||||
|
baseDir = pm.getLocalizationFile(
|
||||||
|
pm.getContext(LocalizationType.COMMON_STATIC,
|
||||||
|
LocalizationLevel.BASE), MIXED_CASE_DIR);
|
||||||
|
baseDir.addFileUpdatedObserver(new ILocalizationFileObserver() {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void fileUpdated(FileUpdatedMessage message) {
|
||||||
|
mixedCasePids = null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
synchronized (MixedCaseProductSupport.class) {
|
||||||
|
if (mixedCasePids == null) {
|
||||||
|
|
||||||
|
// get all localization files in the hierarchy and merge them.
|
||||||
|
Map<LocalizationLevel, LocalizationFile> fileHierarchy = pm
|
||||||
|
.getTieredLocalizationFile(
|
||||||
|
LocalizationType.COMMON_STATIC,
|
||||||
|
MIXED_CASE_PIDS_FILE);
|
||||||
|
|
||||||
|
Set<String> newPids = new HashSet<String>();
|
||||||
|
for (LocalizationFile lf : fileHierarchy.values()) {
|
||||||
|
String filePath = lf.getFile().getAbsolutePath();
|
||||||
|
try (BufferedReader in = new BufferedReader(
|
||||||
|
new InputStreamReader(lf.openInputStream()))) {
|
||||||
|
|
||||||
|
String line;
|
||||||
|
while ((line = in.readLine()) != null) {
|
||||||
|
int pos = line.indexOf(COMMENT_DELIMITER);
|
||||||
|
if (pos >= 0) {
|
||||||
|
line = line.substring(0, pos);
|
||||||
|
}
|
||||||
|
line = line.trim().toUpperCase();
|
||||||
|
String[] pids = line.split("[\\s,]+");
|
||||||
|
for (String pid : pids) {
|
||||||
|
if (pid.length() == 3) {
|
||||||
|
newPids.add(pid);
|
||||||
|
} else if (pid.isEmpty()) {
|
||||||
|
continue;
|
||||||
|
} else {
|
||||||
|
statusHandler.warn("Invalid Product ID \""
|
||||||
|
+ pid + "\" found in " + filePath
|
||||||
|
+ ", ignored.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
mixedCasePids = newPids;
|
||||||
|
} catch (IOException e) {
|
||||||
|
statusHandler.error("Error reading " + filePath, e);
|
||||||
|
} catch (LocalizationException e) {
|
||||||
|
statusHandler.error("Error retrieving " + filePath, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mixedCasePids = newPids;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return mixedCasePids;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static boolean isMixedCase(String pid) {
|
||||||
|
return getMixedCasePids().contains(pid.toUpperCase());
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String conditionalToUpper(String pid, String text) {
|
||||||
|
if (!isMixedCase(pid)) {
|
||||||
|
text = text.toUpperCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
return text;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,5 @@
|
||||||
|
# This file contains the product IDs (nnn) that should be sent in mixed case.
|
||||||
|
# Product IDs should be 3 characters long and delimited by commas or white space.
|
||||||
|
# Overrides to the base file will add to the list of mixed case products
|
||||||
|
|
||||||
|
AFD PNS RWS PWO TCD TWD TWO WRK # Phase 1 Products
|
|
@ -27,6 +27,7 @@ import com.vividsolutions.jts.io.WKTWriter;
|
||||||
* Apr 29, 2011 DR#8986 zhao Read in "counties", not "forecast zones",
|
* Apr 29, 2011 DR#8986 zhao Read in "counties", not "forecast zones",
|
||||||
* Feb 22, 2012 14413 zhao modified getAdjacentZones to add "C" or "Z"
|
* Feb 22, 2012 14413 zhao modified getAdjacentZones to add "C" or "Z"
|
||||||
* Apr 30, 2014 3086 skorolev Replaced MonitorConfigurationManager with FSSObsMonitorConfigurationManager
|
* Apr 30, 2014 3086 skorolev Replaced MonitorConfigurationManager with FSSObsMonitorConfigurationManager
|
||||||
|
* Oct 17, 2014 2757 skorolev Corrected SQL in the getAdjacentZones to avoid duplicates.
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -397,13 +398,13 @@ public class MonitorAreaUtils {
|
||||||
public static List<String> getAdjacentZones(String[] cwaList) {
|
public static List<String> getAdjacentZones(String[] cwaList) {
|
||||||
List<String> zones = new ArrayList<String>();
|
List<String> zones = new ArrayList<String>();
|
||||||
|
|
||||||
String sqlCounty = "select state, fips from "
|
String sqlCounty = "select distinct state, fips from "
|
||||||
+ FSSObsMonitorConfigurationManager.COUNTY_TABLE
|
+ FSSObsMonitorConfigurationManager.COUNTY_TABLE
|
||||||
+ " where cwa in (''";
|
+ " where cwa in (''";
|
||||||
String sqlForecastZone = "select state, zone from "
|
String sqlForecastZone = "select distinct state, zone from "
|
||||||
+ FSSObsMonitorConfigurationManager.FORECAST_ZONE_TABLE
|
+ FSSObsMonitorConfigurationManager.FORECAST_ZONE_TABLE
|
||||||
+ " where cwa in (''";
|
+ " where cwa in (''";
|
||||||
String sqlMaritimeZone = "select id from "
|
String sqlMaritimeZone = "select distinct id from "
|
||||||
+ FSSObsMonitorConfigurationManager.MARINE_ZONE_TABLE
|
+ FSSObsMonitorConfigurationManager.MARINE_ZONE_TABLE
|
||||||
+ " where wfo in (''";
|
+ " where wfo in (''";
|
||||||
for (int i = 0; i < cwaList.length; i++) {
|
for (int i = 0; i < cwaList.length; i++) {
|
||||||
|
|
|
@ -47,6 +47,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
|
||||||
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
|
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
|
||||||
* Mar 27, 2014 2811 skorolev Updated logger.
|
* Mar 27, 2014 2811 skorolev Updated logger.
|
||||||
* Jun 06, 2014 2061 bsteffen Extend PointDataPluginDao
|
* Jun 06, 2014 2061 bsteffen Extend PointDataPluginDao
|
||||||
|
* 10/28/2014 3454 bphillip Fix usage of getSession()
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -122,13 +123,14 @@ public class ACARSDao extends PointDataPluginDao<ACARSRecord> {
|
||||||
* The HQL query string
|
* The HQL query string
|
||||||
* @return The list of objects returned by the query
|
* @return The list of objects returned by the query
|
||||||
*/
|
*/
|
||||||
|
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||||
public List<?> executeACARSQuery(final String hqlQuery) {
|
public List<?> executeACARSQuery(final String hqlQuery) {
|
||||||
|
|
||||||
List<?> result = (List<?>) txTemplate
|
List<?> result = (List<?>) txTemplate
|
||||||
.execute(new TransactionCallback() {
|
.execute(new TransactionCallback() {
|
||||||
@Override
|
@Override
|
||||||
public List<?> doInTransaction(TransactionStatus status) {
|
public List<?> doInTransaction(TransactionStatus status) {
|
||||||
Query hibQuery = getSession(false)
|
Query hibQuery = getCurrentSession()
|
||||||
.createQuery(hqlQuery);
|
.createQuery(hqlQuery);
|
||||||
// hibQuery.setCacheMode(CacheMode.NORMAL);
|
// hibQuery.setCacheMode(CacheMode.NORMAL);
|
||||||
// hibQuery.setCacheRegion(QUERY_CACHE_REGION);
|
// hibQuery.setCacheRegion(QUERY_CACHE_REGION);
|
||||||
|
|
|
@ -43,6 +43,7 @@ import com.raytheon.uf.edex.plugin.acarssounding.tools.ACARSSoundingTools;
|
||||||
* ------------ ---------- ----------- --------------------------
|
* ------------ ---------- ----------- --------------------------
|
||||||
* Jan 21, 2009 1939 jkorman Initial creation
|
* Jan 21, 2009 1939 jkorman Initial creation
|
||||||
* Aug 18, 2014 3530 bclement removed warning from executeSoundingQuery()
|
* Aug 18, 2014 3530 bclement removed warning from executeSoundingQuery()
|
||||||
|
* 10/28/2014 3454 bphillip Fix usage of getSession()
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -107,7 +108,7 @@ public class ACARSSoundingDao extends DefaultPluginDao {
|
||||||
List<?> result = (List<?>) txTemplate
|
List<?> result = (List<?>) txTemplate
|
||||||
.execute(new TransactionCallback<Object>() {
|
.execute(new TransactionCallback<Object>() {
|
||||||
public List<?> doInTransaction(TransactionStatus status) {
|
public List<?> doInTransaction(TransactionStatus status) {
|
||||||
Query hibQuery = getSession(false)
|
Query hibQuery = getCurrentSession()
|
||||||
.createQuery(hqlQuery);
|
.createQuery(hqlQuery);
|
||||||
return hibQuery.list();
|
return hibQuery.list();
|
||||||
}
|
}
|
||||||
|
|
|
@ -94,6 +94,8 @@
|
||||||
<permission id="com.raytheon.localization.site/common_static/archiver/purger"/>
|
<permission id="com.raytheon.localization.site/common_static/archiver/purger"/>
|
||||||
<permission id="com.raytheon.localization.site/common_static/archiver/purger/retention"/>
|
<permission id="com.raytheon.localization.site/common_static/archiver/purger/retention"/>
|
||||||
<permission id="com.raytheon.localization.site/common_static/archiver/purger/case"/>
|
<permission id="com.raytheon.localization.site/common_static/archiver/purger/case"/>
|
||||||
|
|
||||||
|
<permission id="com.raytheon.localization.site/common_static/mixedCase"/>
|
||||||
|
|
||||||
<user userId="ALL">
|
<user userId="ALL">
|
||||||
<userPermission>com.raytheon.localization.site/common_static/purge</userPermission>
|
<userPermission>com.raytheon.localization.site/common_static/purge</userPermission>
|
||||||
|
|
|
@ -36,6 +36,7 @@ import com.raytheon.uf.common.time.util.ITimer;
|
||||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||||
import com.raytheon.uf.common.wmo.WMOHeader;
|
import com.raytheon.uf.common.wmo.WMOHeader;
|
||||||
import com.raytheon.uf.edex.database.plugin.PluginFactory;
|
import com.raytheon.uf.edex.database.plugin.PluginFactory;
|
||||||
|
import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
||||||
import com.raytheon.uf.edex.plugin.redbook.dao.RedbookDao;
|
import com.raytheon.uf.edex.plugin.redbook.dao.RedbookDao;
|
||||||
import com.raytheon.uf.edex.plugin.redbook.decoder.RedbookParser;
|
import com.raytheon.uf.edex.plugin.redbook.decoder.RedbookParser;
|
||||||
|
|
||||||
|
@ -61,6 +62,8 @@ import com.raytheon.uf.edex.plugin.redbook.decoder.RedbookParser;
|
||||||
* Mar 13, 2014 2907 njensen split edex.redbook plugin into common and
|
* Mar 13, 2014 2907 njensen split edex.redbook plugin into common and
|
||||||
* edex redbook plugins
|
* edex redbook plugins
|
||||||
* May 14, 2014 2536 bclement moved WMO Header to common
|
* May 14, 2014 2536 bclement moved WMO Header to common
|
||||||
|
* Oct 24, 2014 3720 mapeters Identify existing records using unique
|
||||||
|
* constraints instead of dataURI.
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
* @author jkorman
|
* @author jkorman
|
||||||
|
@ -198,13 +201,26 @@ public class RedbookDecoder extends AbstractDecoder {
|
||||||
|
|
||||||
private RedbookRecord createdBackDatedVersionIfNeeded(RedbookRecord record) {
|
private RedbookRecord createdBackDatedVersionIfNeeded(RedbookRecord record) {
|
||||||
RedbookDao dao;
|
RedbookDao dao;
|
||||||
RedbookRecord existingRecord;
|
RedbookRecord existingRecord = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
dao = (RedbookDao) PluginFactory.getInstance().getPluginDao(
|
dao = (RedbookDao) PluginFactory.getInstance().getPluginDao(
|
||||||
PLUGIN_NAME);
|
PLUGIN_NAME);
|
||||||
existingRecord = (RedbookRecord) dao.getMetadata(record
|
DatabaseQuery query = new DatabaseQuery(RedbookRecord.class);
|
||||||
.getDataURI());
|
query.addQueryParam("wmoTTAAii", record.getWmoTTAAii());
|
||||||
|
query.addQueryParam("corIndicator", record.getCorIndicator());
|
||||||
|
query.addQueryParam("fcstHours", record.getFcstHours());
|
||||||
|
query.addQueryParam("productId", record.getProductId());
|
||||||
|
query.addQueryParam("fileId", record.getFileId());
|
||||||
|
query.addQueryParam("originatorId", record.getOriginatorId());
|
||||||
|
query.addQueryParam("dataTime", record.getDataTime());
|
||||||
|
|
||||||
|
PluginDataObject[] resultList = dao.getMetadata(query);
|
||||||
|
|
||||||
|
if (resultList != null && resultList.length > 0
|
||||||
|
&& resultList[0] instanceof RedbookRecord) {
|
||||||
|
existingRecord = (RedbookRecord) resultList[0];
|
||||||
|
}
|
||||||
} catch (PluginException e) {
|
} catch (PluginException e) {
|
||||||
logger.error(traceId + "Could not create back-dated copy of "
|
logger.error(traceId + "Could not create back-dated copy of "
|
||||||
+ record.getDataURI(), e);
|
+ record.getDataURI(), e);
|
||||||
|
|
|
@ -41,13 +41,13 @@ import org.hibernate.Criteria;
|
||||||
import org.hibernate.HibernateException;
|
import org.hibernate.HibernateException;
|
||||||
import org.hibernate.Query;
|
import org.hibernate.Query;
|
||||||
import org.hibernate.Session;
|
import org.hibernate.Session;
|
||||||
import org.hibernate.StatelessSession;
|
|
||||||
import org.hibernate.Transaction;
|
|
||||||
import org.hibernate.criterion.Order;
|
import org.hibernate.criterion.Order;
|
||||||
import org.hibernate.criterion.ProjectionList;
|
import org.hibernate.criterion.ProjectionList;
|
||||||
import org.hibernate.criterion.Projections;
|
import org.hibernate.criterion.Projections;
|
||||||
import org.hibernate.criterion.Restrictions;
|
import org.hibernate.criterion.Restrictions;
|
||||||
import org.springframework.orm.hibernate4.SessionFactoryUtils;
|
import org.springframework.orm.hibernate4.SessionFactoryUtils;
|
||||||
|
import org.springframework.transaction.TransactionStatus;
|
||||||
|
import org.springframework.transaction.support.TransactionCallback;
|
||||||
|
|
||||||
import com.raytheon.uf.common.dataplugin.text.db.OperationalStdTextProduct;
|
import com.raytheon.uf.common.dataplugin.text.db.OperationalStdTextProduct;
|
||||||
import com.raytheon.uf.common.dataplugin.text.db.PracticeStdTextProduct;
|
import com.raytheon.uf.common.dataplugin.text.db.PracticeStdTextProduct;
|
||||||
|
@ -93,6 +93,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
|
||||||
* May 20, 2014 2536 bclement moved from edex.textdb to edex.plugin.text
|
* May 20, 2014 2536 bclement moved from edex.textdb to edex.plugin.text
|
||||||
* Sep 18, 2014 3627 mapeters Updated deprecated {@link TimeTools} usage.
|
* Sep 18, 2014 3627 mapeters Updated deprecated {@link TimeTools} usage.
|
||||||
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
|
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
|
||||||
|
* 10/28/2014 3454 bphillip Fix usage of getSession()
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
* @author garmendariz
|
* @author garmendariz
|
||||||
|
@ -191,47 +192,52 @@ public class StdTextProductDao extends CoreDao {
|
||||||
prodId.setCccid(ccc);
|
prodId.setCccid(ccc);
|
||||||
prodId.setNnnid(nnn);
|
prodId.setNnnid(nnn);
|
||||||
prodId.setXxxid(xxx);
|
prodId.setXxxid(xxx);
|
||||||
|
Session session = this.getSession();
|
||||||
try {
|
try {
|
||||||
Query query = this.getSession().createQuery(
|
|
||||||
"SELECT refTime from "
|
|
||||||
+ textProduct.getClass().getSimpleName()
|
|
||||||
+ " where prodId = :prodid");
|
|
||||||
query.setParameter("prodid", prodId);
|
|
||||||
List<?> results = query.list();
|
|
||||||
|
|
||||||
if (results == null || results.size() < 1) {
|
|
||||||
// save
|
|
||||||
create(textProduct);
|
|
||||||
success = true;
|
|
||||||
} else {
|
|
||||||
// don't save
|
|
||||||
success = false;
|
|
||||||
}
|
|
||||||
} catch (Exception e) {
|
|
||||||
logger.error("Error storing text product", e);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (success) {
|
|
||||||
try {
|
try {
|
||||||
String cccid = prodId.getCccid();
|
Query query = session.createQuery("SELECT refTime from "
|
||||||
String nnnid = prodId.getNnnid();
|
+ textProduct.getClass().getSimpleName()
|
||||||
String xxxid = prodId.getXxxid();
|
+ " where prodId = :prodid");
|
||||||
Query query = this
|
query.setParameter("prodid", prodId);
|
||||||
.getSession()
|
|
||||||
.createQuery(
|
|
||||||
"SELECT versionstokeep FROM TextProductInfo WHERE "
|
|
||||||
+ "prodId.cccid = :cccid AND prodId.nnnid = :nnnid AND prodId.xxxid = :xxxid");
|
|
||||||
query.setParameter("cccid", cccid);
|
|
||||||
query.setParameter("nnnid", nnnid);
|
|
||||||
query.setParameter("xxxid", xxxid);
|
|
||||||
List<?> results = query.list();
|
List<?> results = query.list();
|
||||||
|
|
||||||
if (results == null || results.size() < 1) {
|
if (results == null || results.size() < 1) {
|
||||||
TextProductInfo tpi = new TextProductInfo(cccid, nnnid,
|
// save
|
||||||
xxxid);
|
create(textProduct);
|
||||||
create(tpi);
|
success = true;
|
||||||
|
} else {
|
||||||
|
// don't save
|
||||||
|
success = false;
|
||||||
}
|
}
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
logger.error("Error verify text product info", e);
|
logger.error("Error storing text product", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
try {
|
||||||
|
String cccid = prodId.getCccid();
|
||||||
|
String nnnid = prodId.getNnnid();
|
||||||
|
String xxxid = prodId.getXxxid();
|
||||||
|
Query query = session
|
||||||
|
.createQuery(
|
||||||
|
"SELECT versionstokeep FROM TextProductInfo WHERE "
|
||||||
|
+ "prodId.cccid = :cccid AND prodId.nnnid = :nnnid AND prodId.xxxid = :xxxid");
|
||||||
|
query.setParameter("cccid", cccid);
|
||||||
|
query.setParameter("nnnid", nnnid);
|
||||||
|
query.setParameter("xxxid", xxxid);
|
||||||
|
List<?> results = query.list();
|
||||||
|
if (results == null || results.size() < 1) {
|
||||||
|
TextProductInfo tpi = new TextProductInfo(cccid, nnnid,
|
||||||
|
xxxid);
|
||||||
|
create(tpi);
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Error verify text product info", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
if (session != null) {
|
||||||
|
session.close();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -255,118 +261,134 @@ public class StdTextProductDao extends CoreDao {
|
||||||
* @param pastHours
|
* @param pastHours
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
public List<StdTextProduct> cccnnnxxxReadVersion(String ccc, String nnn,
|
public List<StdTextProduct> cccnnnxxxReadVersion(final String ccc, final String nnn,
|
||||||
String xxx, int version) {
|
final String xxx, final int version) {
|
||||||
List<StdTextProduct> products = null;
|
List<StdTextProduct> products = null;
|
||||||
ccc = StringUtils.rightPad(ccc, MAX_FIELD_LENGTH);
|
|
||||||
nnn = StringUtils.rightPad(nnn, MAX_FIELD_LENGTH);
|
|
||||||
xxx = StringUtils.rightPad(xxx, MAX_FIELD_LENGTH);
|
|
||||||
boolean hasCCC = ((ccc != null) && (ccc.length() > 0) && (!ccc
|
boolean hasCCC = ((ccc != null) && (ccc.length() > 0) && (!ccc
|
||||||
.equals("000")));
|
.equals("000")));
|
||||||
boolean hasNNN = ((nnn != null) && (nnn.length() > 0) && (!nnn
|
boolean hasNNN = ((nnn != null) && (nnn.length() > 0) && (!nnn
|
||||||
.equals("000")));
|
.equals("000")));
|
||||||
boolean hasXXX = ((xxx != null) && (xxx.length() > 0) && (!xxx
|
boolean hasXXX = ((xxx != null) && (xxx.length() > 0) && (!xxx
|
||||||
.equals("000")));
|
.equals("000")));
|
||||||
boolean createInitialFilter = !(hasCCC && hasNNN && hasXXX);
|
final boolean createInitialFilter = !(hasCCC && hasNNN && hasXXX);
|
||||||
|
|
||||||
AFOSProductId[] afosIds = null;
|
|
||||||
StatelessSession session = null;
|
|
||||||
Transaction tx = null;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
session = getSessionFactory().openStatelessSession();
|
final AFOSProductId[] afosIds = txTemplate
|
||||||
tx = session.beginTransaction();
|
.execute(new TransactionCallback<AFOSProductId[]>() {
|
||||||
StdTextProduct stdTextProduct = getStdTextProductInstance();
|
|
||||||
|
|
||||||
if (createInitialFilter) {
|
@Override
|
||||||
stdTextProduct.setCccid(ccc);
|
public AFOSProductId[] doInTransaction(
|
||||||
stdTextProduct.setNnnid(nnn);
|
TransactionStatus status) {
|
||||||
stdTextProduct.setXxxid(xxx);
|
String paddedccc = StringUtils.rightPad(ccc,
|
||||||
|
MAX_FIELD_LENGTH);
|
||||||
|
String paddednnn = StringUtils.rightPad(nnn,
|
||||||
|
MAX_FIELD_LENGTH);
|
||||||
|
String paddedxxx = StringUtils.rightPad(xxx,
|
||||||
|
MAX_FIELD_LENGTH);
|
||||||
|
Session session = getCurrentSession();
|
||||||
|
AFOSProductId[] afosIds = null;
|
||||||
|
StdTextProduct stdTextProduct = getStdTextProductInstance();
|
||||||
|
|
||||||
Map<String, String> map = buildCriterions(ProdCCC_ID, ccc,
|
if (createInitialFilter) {
|
||||||
ProdNNN_ID, nnn, ProdXXX_ID, xxx);
|
stdTextProduct.setCccid(paddedccc);
|
||||||
Criteria criteria = session.createCriteria(stdTextProduct
|
stdTextProduct.setNnnid(paddednnn);
|
||||||
.getClass());
|
stdTextProduct.setXxxid(paddedxxx);
|
||||||
ProjectionList projList = Projections.projectionList();
|
|
||||||
projList.add(Projections.property(ProdCCC_ID));
|
|
||||||
projList.add(Projections.property(ProdNNN_ID));
|
|
||||||
projList.add(Projections.property(ProdXXX_ID));
|
|
||||||
criteria.setProjection(Projections.distinct(projList));
|
|
||||||
criteria.add(Restrictions.allEq(map));
|
|
||||||
criteria.addOrder(Order.asc(ProdCCC_ID));
|
|
||||||
criteria.addOrder(Order.asc(ProdNNN_ID));
|
|
||||||
criteria.addOrder(Order.asc(ProdXXX_ID));
|
|
||||||
|
|
||||||
List<?> list = criteria.list();
|
Map<String, String> map = buildCriterions(
|
||||||
if (list != null && list.size() > 0) {
|
ProdCCC_ID, paddedccc, ProdNNN_ID,
|
||||||
afosIds = new AFOSProductId[list.size()];
|
paddednnn, ProdXXX_ID, paddedxxx);
|
||||||
int i = 0;
|
Criteria criteria = session
|
||||||
for (Object row : list) {
|
.createCriteria(stdTextProduct
|
||||||
Object[] cols = (Object[]) row;
|
.getClass());
|
||||||
afosIds[i++] = new AFOSProductId((String) cols[0],
|
ProjectionList projList = Projections
|
||||||
(String) cols[1], (String) cols[2]);
|
.projectionList();
|
||||||
}
|
projList.add(Projections.property(ProdCCC_ID));
|
||||||
} else {
|
projList.add(Projections.property(ProdNNN_ID));
|
||||||
afosIds = new AFOSProductId[0];
|
projList.add(Projections.property(ProdXXX_ID));
|
||||||
}
|
criteria.setProjection(Projections
|
||||||
tx.commit();
|
.distinct(projList));
|
||||||
} else {
|
criteria.add(Restrictions.allEq(map));
|
||||||
afosIds = new AFOSProductId[1];
|
criteria.addOrder(Order.asc(ProdCCC_ID));
|
||||||
afosIds[0] = new AFOSProductId(ccc, nnn, xxx);
|
criteria.addOrder(Order.asc(ProdNNN_ID));
|
||||||
}
|
criteria.addOrder(Order.asc(ProdXXX_ID));
|
||||||
|
|
||||||
|
List<?> list = criteria.list();
|
||||||
|
if (list != null && list.size() > 0) {
|
||||||
|
afosIds = new AFOSProductId[list.size()];
|
||||||
|
int i = 0;
|
||||||
|
for (Object row : list) {
|
||||||
|
Object[] cols = (Object[]) row;
|
||||||
|
afosIds[i++] = new AFOSProductId(
|
||||||
|
(String) cols[0],
|
||||||
|
(String) cols[1],
|
||||||
|
(String) cols[2]);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
afosIds = new AFOSProductId[0];
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
afosIds = new AFOSProductId[1];
|
||||||
|
afosIds[0] = new AFOSProductId(paddedccc,
|
||||||
|
paddednnn, paddedxxx);
|
||||||
|
}
|
||||||
|
return afosIds;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
products = txTemplate.execute(new TransactionCallback<List<StdTextProduct>>() {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<StdTextProduct> doInTransaction(
|
||||||
|
TransactionStatus status) {
|
||||||
|
List<StdTextProduct> products = null;
|
||||||
|
Session session = getCurrentSession();
|
||||||
|
/*
|
||||||
|
* DR15244 - Make sure that the query is performed on the appropriate
|
||||||
|
* table based on what StdTextProduct is requested (ultimately on CAVE mode)
|
||||||
|
*/
|
||||||
|
Matcher m = Pattern.compile("StdTextProduct").matcher(AFOS_QUERY_STMT);
|
||||||
|
String tableName = getStdTextProductInstance().getClass().getSimpleName();
|
||||||
|
String tableQuery = m.replaceAll(tableName);
|
||||||
|
Query query = session.createQuery(tableQuery);
|
||||||
|
|
||||||
|
|
||||||
|
if (version >= 0) {
|
||||||
|
query.setMaxResults(version + 1);
|
||||||
|
}
|
||||||
|
for (AFOSProductId afosId : afosIds) {
|
||||||
|
query.setParameter(CCC_ID, afosId.getCcc());
|
||||||
|
query.setParameter(NNN_ID, afosId.getNnn());
|
||||||
|
query.setParameter(XXX_ID, afosId.getXxx());
|
||||||
|
|
||||||
|
List<?> results = query.list();
|
||||||
|
if (results != null && results.size() > 0) {
|
||||||
|
if (version == -1) {
|
||||||
|
// want all versions
|
||||||
|
if (products == null) {
|
||||||
|
products = new ArrayList<StdTextProduct>(
|
||||||
|
results.size() * afosIds.length);
|
||||||
|
}
|
||||||
|
for (Object row : results) {
|
||||||
|
products.add((StdTextProduct) row);
|
||||||
|
}
|
||||||
|
} else if (results.size() > version) {
|
||||||
|
// want specific version
|
||||||
|
if (products == null) {
|
||||||
|
products = new ArrayList<StdTextProduct>(
|
||||||
|
afosIds.length);
|
||||||
|
}
|
||||||
|
products.add((StdTextProduct) results.get(version));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return products;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
tx = session.beginTransaction();
|
|
||||||
/*
|
|
||||||
* DR15244 - Make sure that the query is performed on the appropriate
|
|
||||||
* table based on what StdTextProduct is requested (ultimately on CAVE mode)
|
|
||||||
*/
|
|
||||||
Matcher m = Pattern.compile("StdTextProduct").matcher(AFOS_QUERY_STMT);
|
|
||||||
String tableName = stdTextProduct.getClass().getSimpleName();
|
|
||||||
String tableQuery = m.replaceAll(tableName);
|
|
||||||
Query query = session.createQuery(tableQuery);
|
|
||||||
|
|
||||||
|
|
||||||
if (version >= 0) {
|
|
||||||
query.setMaxResults(version + 1);
|
|
||||||
}
|
|
||||||
for (AFOSProductId afosId : afosIds) {
|
|
||||||
query.setParameter(CCC_ID, afosId.getCcc());
|
|
||||||
query.setParameter(NNN_ID, afosId.getNnn());
|
|
||||||
query.setParameter(XXX_ID, afosId.getXxx());
|
|
||||||
|
|
||||||
List<?> results = query.list();
|
|
||||||
if (results != null && results.size() > 0) {
|
|
||||||
if (version == -1) {
|
|
||||||
// want all versions
|
|
||||||
if (products == null) {
|
|
||||||
products = new ArrayList<StdTextProduct>(
|
|
||||||
results.size() * afosIds.length);
|
|
||||||
}
|
|
||||||
for (Object row : results) {
|
|
||||||
products.add((StdTextProduct) row);
|
|
||||||
}
|
|
||||||
} else if (results.size() > version) {
|
|
||||||
// want specific version
|
|
||||||
if (products == null) {
|
|
||||||
products = new ArrayList<StdTextProduct>(
|
|
||||||
afosIds.length);
|
|
||||||
}
|
|
||||||
products.add((StdTextProduct) results.get(version));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
tx.commit();
|
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
logger.error("Error occurred reading products", e);
|
logger.error("Error occurred reading products", e);
|
||||||
if (tx != null) {
|
|
||||||
try {
|
|
||||||
tx.rollback();
|
|
||||||
} catch (Exception e1) {
|
|
||||||
logger.error("Error occurred rolling back transaction", e1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
closeSession(session);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (products == null) {
|
if (products == null) {
|
||||||
|
@ -534,8 +556,10 @@ public class StdTextProductDao extends CoreDao {
|
||||||
public long getLatestTime(AFOSProductId afosId) {
|
public long getLatestTime(AFOSProductId afosId) {
|
||||||
long latestTime = 0L;
|
long latestTime = 0L;
|
||||||
|
|
||||||
|
Session sess = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
Session sess = getSession();
|
sess = getSession();
|
||||||
|
|
||||||
Map<?, ?> tmp = buildCriterions(ProdCCC_ID, afosId.getCcc(),
|
Map<?, ?> tmp = buildCriterions(ProdCCC_ID, afosId.getCcc(),
|
||||||
ProdNNN_ID, afosId.getNnn(), ProdXXX_ID, afosId.getXxx());
|
ProdNNN_ID, afosId.getNnn(), ProdXXX_ID, afosId.getXxx());
|
||||||
|
@ -557,6 +581,10 @@ public class StdTextProductDao extends CoreDao {
|
||||||
}
|
}
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
logger.error("Error occurred getting latest time", e);
|
logger.error("Error occurred getting latest time", e);
|
||||||
|
}finally{
|
||||||
|
if(sess != null){
|
||||||
|
sess.close();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return latestTime;
|
return latestTime;
|
||||||
@@ -625,55 +653,41 @@ public class StdTextProductDao extends CoreDao {
     }
 
     private AFOSProductId[] getDistinctAfosIds() throws HibernateException {
-        AFOSProductId[] products = null;
-        StatelessSession sess = null;
-        Transaction tx = null;
-
-        try {
-            sess = getSessionFactory().openStatelessSession();
-            tx = sess.beginTransaction();
-            Criteria crit = sess
-                    .createCriteria((this.operationalMode ? OperationalStdTextProduct.class
-                            : PracticeStdTextProduct.class));
-            ProjectionList fields = Projections.projectionList();
-            fields.add(Projections.property(ProdCCC_ID));
-            fields.add(Projections.property(ProdNNN_ID));
-            fields.add(Projections.property(ProdXXX_ID));
-            crit.setProjection(Projections.distinct(fields));
-            crit.addOrder(Order.asc(ProdCCC_ID));
-            crit.addOrder(Order.asc(ProdNNN_ID));
-            crit.addOrder(Order.asc(ProdXXX_ID));
-            List<?> results = crit.list();
-
-            if (results != null && results.size() > 0) {
-                products = new AFOSProductId[results.size()];
-                String cccid = null;
-                String nnnid = null;
-                String xxxid = null;
-                int i = 0;
-                for (Object row : results) {
-                    Object[] cols = (Object[]) row;
-                    cccid = cols[0].toString();
-                    nnnid = cols[1].toString();
-                    xxxid = cols[2].toString();
-                    products[i++] = new AFOSProductId(cccid, nnnid, xxxid);
-                }
-            }
-            tx.commit();
-            tx = null;
-        } finally {
-            if (tx != null) {
-                try {
-                    tx.rollback();
-                } catch (Exception e) {
-                    logger.error("Caught Exception rolling back transaction", e);
-                }
-            }
-            closeSession(sess);
-        }
-
-        return products;
+        return txTemplate.execute(new TransactionCallback<AFOSProductId[]>() {
+            @Override
+            public AFOSProductId[] doInTransaction(TransactionStatus status) {
+                AFOSProductId[] products = null;
+                Session sess = getCurrentSession();
+                Criteria crit = sess
+                        .createCriteria((operationalMode ? OperationalStdTextProduct.class
+                                : PracticeStdTextProduct.class));
+                ProjectionList fields = Projections.projectionList();
+                fields.add(Projections.property(ProdCCC_ID));
+                fields.add(Projections.property(ProdNNN_ID));
+                fields.add(Projections.property(ProdXXX_ID));
+                crit.setProjection(Projections.distinct(fields));
+                crit.addOrder(Order.asc(ProdCCC_ID));
+                crit.addOrder(Order.asc(ProdNNN_ID));
+                crit.addOrder(Order.asc(ProdXXX_ID));
+                List<?> results = crit.list();
+
+                if (results != null && results.size() > 0) {
+                    products = new AFOSProductId[results.size()];
+                    String cccid = null;
+                    String nnnid = null;
+                    String xxxid = null;
+                    int i = 0;
+                    for (Object row : results) {
+                        Object[] cols = (Object[]) row;
+                        cccid = cols[0].toString();
+                        nnnid = cols[1].toString();
+                        xxxid = cols[2].toString();
+                        products[i++] = new AFOSProductId(cccid, nnnid, xxxid);
+                    }
+                }
+                return products;
+            }
+        });
     }
 
     /**
@@ -693,8 +707,6 @@ public class StdTextProductDao extends CoreDao {
      * @return
      */
     public int versionPurge(String afosId) {
-        StatelessSession session = null;
-        Transaction tx = null;
         int rval = 0;
         if (PurgeLogger.isDebugEnabled()) {
             if (afosId == null) {
@@ -721,120 +733,103 @@ public class StdTextProductDao extends CoreDao {
             }
 
             if (ids != null && ids.size() > 0) {
-                String cccid = null;
-                String nnnid = null;
-                String xxxid = null;
-
-                String refTimeQueryString = null;
-                {
-                    StringBuilder refTimeQueryBuilder = new StringBuilder(200);
-                    refTimeQueryBuilder.append("SELECT refTime FROM ");
-                    refTimeQueryBuilder.append(getStdTextProductInstance()
-                            .getClass().getSimpleName());
-                    refTimeQueryBuilder.append(" WHERE ");
-                    refTimeQueryBuilder.append(ProdCCC_ID).append(" = :cccid")
-                            .append(" AND ");
-                    refTimeQueryBuilder.append(ProdNNN_ID).append(" = :nnnid")
-                            .append(" AND ");
-                    refTimeQueryBuilder.append(ProdXXX_ID).append(" = :xxxid");
-                    refTimeQueryBuilder.append(" ORDER BY refTime DESC");
-                    refTimeQueryBuilder.append(", insertTime DESC");
-                    refTimeQueryString = refTimeQueryBuilder.toString();
-                }
-
-                String delQueryString = null;
-                {
-                    StringBuilder delQueryBuilder = new StringBuilder(200);
-                    delQueryBuilder.append("DELETE FROM ");
-                    delQueryBuilder.append(getStdTextProductInstance()
-                            .getClass().getSimpleName());
-                    delQueryBuilder.append(" WHERE ");
-                    delQueryBuilder.append(ProdCCC_ID).append(" = :cccid")
-                            .append(" AND ");
-                    delQueryBuilder.append(ProdNNN_ID).append(" = :nnnid")
-                            .append(" AND ");
-                    delQueryBuilder.append(ProdXXX_ID).append(" = :xxxid")
-                            .append(" AND ");
-                    delQueryBuilder.append("refTime < :refTime");
-                    delQueryString = delQueryBuilder.toString();
-                }
-
-                session = getSessionFactory().openStatelessSession();
-
-                for (TextProductInfo prodInfo : ids) {
-                    TextProductInfoPK pk = prodInfo.getProdId();
-                    cccid = pk.getCccid();
-                    nnnid = pk.getNnnid();
-                    xxxid = pk.getXxxid();
-
-                    try {
-                        tx = session.beginTransaction();
-                        Query refTimeQuery = session
-                                .createQuery(refTimeQueryString);
-                        refTimeQuery.setString("cccid", cccid);
-                        refTimeQuery.setString("nnnid", nnnid);
-                        refTimeQuery.setString("xxxid", xxxid);
-                        refTimeQuery
-                                .setMaxResults(prodInfo.getVersionstokeep());
-                        List<?> refTimes = refTimeQuery.list();
-                        if (refTimes.size() >= prodInfo.getVersionstokeep()) {
-                            long refTime = ((Number) refTimes.get(prodInfo
-                                    .getVersionstokeep() - 1)).longValue();
-                            Query delQuery = session
-                                    .createQuery(delQueryString);
-                            delQuery.setString("cccid", cccid);
-                            delQuery.setString("nnnid", nnnid);
-                            delQuery.setString("xxxid", xxxid);
-                            delQuery.setLong("refTime", refTime);
-
-                            if (PurgeLogger.isDebugEnabled()) {
-                                PurgeLogger.logDebug("Purging records for ["
-                                        + cccid + nnnid + xxxid
-                                        + "] before refTime [" + refTime + "]",
-                                        PLUGIN_NAME);
-                            }
-
-                            int rowsDeleted = delQuery.executeUpdate();
-
-                            // commit every afos id purge
-                            tx.commit();
-                            tx = null;
-                            if (PurgeLogger.isDebugEnabled()) {
-                                PurgeLogger.logDebug("Purged [" + rowsDeleted
-                                        + "] records for [" + cccid + nnnid
-                                        + xxxid + "]", PLUGIN_NAME);
-                            }
-                            rval += rowsDeleted;
-                        } else if (PurgeLogger.isDebugEnabled()) {
-                            PurgeLogger.logDebug(
-                                    "VersionPurge: Product [" + cccid + nnnid
-                                            + xxxid + "] has fewer than ["
-                                            + prodInfo.getVersionstokeep()
-                                            + "] versions", PLUGIN_NAME);
-                        }
-                    } catch (Exception e) {
-                        PurgeLogger.logError(
-                                "Exception occurred purging text products ["
-                                        + cccid + nnnid + xxxid + "]",
-                                PLUGIN_NAME, e);
-                        if (tx != null) {
-                            try {
-                                tx.rollback();
-                            } catch (Exception e1) {
-                                PurgeLogger
-                                        .logError(
-                                                "Error occurred rolling back transaction",
-                                                PLUGIN_NAME, e1);
-                            }
-                        }
-                    }
-                }
+                StringBuilder refTimeQueryBuilder = new StringBuilder(200);
+                refTimeQueryBuilder.append("SELECT refTime FROM ");
+                refTimeQueryBuilder.append(getStdTextProductInstance()
+                        .getClass().getSimpleName());
+                refTimeQueryBuilder.append(" WHERE ");
+                refTimeQueryBuilder.append(ProdCCC_ID).append(" = :cccid")
+                        .append(" AND ");
+                refTimeQueryBuilder.append(ProdNNN_ID).append(" = :nnnid")
+                        .append(" AND ");
+                refTimeQueryBuilder.append(ProdXXX_ID).append(" = :xxxid");
+                refTimeQueryBuilder.append(" ORDER BY refTime DESC");
+                refTimeQueryBuilder.append(", insertTime DESC");
+                final String refTimeQueryString = refTimeQueryBuilder.toString();
+
+                StringBuilder delQueryBuilder = new StringBuilder(200);
+                delQueryBuilder.append("DELETE FROM ");
+                delQueryBuilder.append(getStdTextProductInstance()
+                        .getClass().getSimpleName());
+                delQueryBuilder.append(" WHERE ");
+                delQueryBuilder.append(ProdCCC_ID).append(" = :cccid")
+                        .append(" AND ");
+                delQueryBuilder.append(ProdNNN_ID).append(" = :nnnid")
+                        .append(" AND ");
+                delQueryBuilder.append(ProdXXX_ID).append(" = :xxxid")
+                        .append(" AND ");
+                delQueryBuilder.append("refTime < :refTime");
+                final String delQueryString = delQueryBuilder.toString();
+
+                for (final TextProductInfo prodInfo : ids) {
+                    rval += txTemplate.execute(new TransactionCallback<Integer>() {
+                        @Override
+                        public Integer doInTransaction(TransactionStatus status) {
+                            Session session = getCurrentSession();
+                            TextProductInfoPK pk = prodInfo.getProdId();
+                            String cccid = pk.getCccid();
+                            String nnnid = pk.getNnnid();
+                            String xxxid = pk.getXxxid();
+                            int rowsDeleted = 0;
+
+                            try {
+                                Query refTimeQuery = session
+                                        .createQuery(refTimeQueryString);
+                                refTimeQuery.setString("cccid", cccid);
+                                refTimeQuery.setString("nnnid", nnnid);
+                                refTimeQuery.setString("xxxid", xxxid);
+                                refTimeQuery
+                                        .setMaxResults(prodInfo.getVersionstokeep());
+                                List<?> refTimes = refTimeQuery.list();
+                                if (refTimes.size() >= prodInfo.getVersionstokeep()) {
+                                    long refTime = ((Number) refTimes.get(prodInfo
+                                            .getVersionstokeep() - 1)).longValue();
+                                    Query delQuery = session
+                                            .createQuery(delQueryString);
+                                    delQuery.setString("cccid", cccid);
+                                    delQuery.setString("nnnid", nnnid);
+                                    delQuery.setString("xxxid", xxxid);
+                                    delQuery.setLong("refTime", refTime);
+
+                                    if (PurgeLogger.isDebugEnabled()) {
+                                        PurgeLogger.logDebug("Purging records for ["
+                                                + cccid + nnnid + xxxid
+                                                + "] before refTime [" + refTime + "]",
+                                                PLUGIN_NAME);
+                                    }
+
+                                    rowsDeleted = delQuery.executeUpdate();
+                                    if (PurgeLogger.isDebugEnabled()) {
+                                        PurgeLogger.logDebug("Purged [" + rowsDeleted
+                                                + "] records for [" + cccid + nnnid
+                                                + xxxid + "]", PLUGIN_NAME);
+                                    }
+                                } else if (PurgeLogger.isDebugEnabled()) {
+                                    PurgeLogger.logDebug(
+                                            "VersionPurge: Product [" + cccid + nnnid
+                                                    + xxxid + "] has fewer than ["
+                                                    + prodInfo.getVersionstokeep()
+                                                    + "] versions", PLUGIN_NAME);
+                                }
+                            } catch (Exception e) {
+                                PurgeLogger.logError(
+                                        "Exception occurred purging text products ["
+                                                + cccid + nnnid + xxxid + "]",
+                                        PLUGIN_NAME, e);
+                            }
+                            return rowsDeleted;
+                        }
+                    });
+                }
             }
         } catch (Exception e) {
             // don't need to worry about rolling back transaction
             PurgeLogger.logError("Error purging text products", PLUGIN_NAME, e);
-        } finally {
-            closeSession(session);
         }
         return rval;
     }
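The heart of the versionPurge() rewrite above is the cutoff calculation: keep the N most recent reference times for a product and delete everything strictly older. A minimal, self-contained sketch of just that step, using an in-memory list and an illustrative keep-count rather than the DAO's actual TextProductInfo types:

    import java.util.Arrays;
    import java.util.List;

    public class PurgeCutoffSketch {

        /**
         * Returns the purge cutoff refTime, or null when fewer versions are
         * stored than we want to keep (nothing to purge).
         */
        static Long cutoff(List<Long> refTimesNewestFirst, int versionsToKeep) {
            if (refTimesNewestFirst.size() < versionsToKeep) {
                return null;
            }
            // The Nth newest refTime; rows with refTime strictly less than this
            // value are deleted, mirroring the "refTime < :refTime" HQL above.
            return refTimesNewestFirst.get(versionsToKeep - 1);
        }

        public static void main(String[] args) {
            List<Long> refTimes = Arrays.asList(500L, 400L, 300L, 200L, 100L);
            System.out.println(cutoff(refTimes, 3));  // 300 -> rows 200 and 100 would be purged
            System.out.println(cutoff(refTimes, 10)); // null -> nothing purged
        }
    }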
@@ -2818,16 +2813,6 @@ public class StdTextProductDao extends CoreDao {
         }
     }
 
-    private void closeSession(StatelessSession s) {
-        if (s != null) {
-            try {
-                s.close();
-            } catch (Exception e) {
-                logger.error("Error closing Session", e);
-            }
-        }
-    }
-
     private void closeConnection(Connection c) {
         if (c != null) {
             try {
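The recurring change in this file is the replacement of hand-managed StatelessSession/Transaction pairs with Spring's TransactionTemplate, so that begin, commit, and rollback can no longer be forgotten or leaked. A minimal sketch of that shape, assuming a configured TransactionTemplate and SessionFactory are injected; NamesDao and the HQL string are illustrative names, not the AWIPS classes:

    import java.util.List;

    import org.hibernate.Session;
    import org.hibernate.SessionFactory;
    import org.springframework.transaction.TransactionStatus;
    import org.springframework.transaction.support.TransactionCallback;
    import org.springframework.transaction.support.TransactionTemplate;

    // Illustrative DAO: shows only the shape of the change, not the real class.
    public class NamesDao {

        private final TransactionTemplate txTemplate;

        private final SessionFactory sessionFactory;

        public NamesDao(TransactionTemplate txTemplate, SessionFactory sessionFactory) {
            this.txTemplate = txTemplate;
            this.sessionFactory = sessionFactory;
        }

        public List<?> loadNames() {
            // The template opens, commits, or rolls back the transaction; the
            // callback only runs the query, so no Session or Transaction leaks.
            return txTemplate.execute(new TransactionCallback<List<?>>() {
                @Override
                public List<?> doInTransaction(TransactionStatus status) {
                    Session sess = sessionFactory.getCurrentSession();
                    return sess.createQuery("select name from SomeEntity").list();
                }
            });
        }
    }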
@@ -27,6 +27,7 @@ import java.util.Map;
 
 import org.hibernate.Criteria;
 import org.hibernate.Query;
+import org.hibernate.Session;
 import org.springframework.transaction.TransactionStatus;
 import org.springframework.transaction.support.TransactionCallback;
 
@@ -57,6 +58,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 * Nov 08, 2013 2361 njensen Chaged method signature of saveOrUpdate(Object)
 * May 22, 2014 2536 bclement moved from autobldsrv to edex.plugin.text
 * 10/16/2014 3454 bphillip Upgrading to Hibernate 4
+* 10/28/2014 3454 bphillip Fix usage of getSession()
 * </pre>
 *
 * @author mfegan
@@ -110,25 +112,30 @@ public class SubscriptionDAO extends CoreDao {
     */
    public boolean write(SubscriptionRecord record) {
        // query
-       Query query = this
-               .getSession()
-               .createQuery(
-                       "from SubscriptionRecord where type = :type and trigger = :trigger and runner = :runner and script = :script and filepath = :filepath and arguments = :arguments");
-       query.setParameter("type", record.getType());
-       query.setParameter("trigger", record.getTrigger());
-       query.setParameter("runner", record.getRunner());
-       query.setParameter("script", record.getScript());
-       query.setParameter("filepath", record.getFilepath());
-       query.setParameter("arguments", record.getArguments());
-       List<?> results = query.list();
-
-       if (results.size() > 0) {
-           return false;
-       } else {
-           create(record);
-           sendSubscriptionNotifyMessage(String
-                   .valueOf(record.getIdentifier()));
-           return true;
+       Session session = this.getSession();
+       try {
+           Query query = session
+                   .createQuery("from SubscriptionRecord where type = :type and trigger = :trigger and runner = :runner and script = :script and filepath = :filepath and arguments = :arguments");
+           query.setParameter("type", record.getType());
+           query.setParameter("trigger", record.getTrigger());
+           query.setParameter("runner", record.getRunner());
+           query.setParameter("script", record.getScript());
+           query.setParameter("filepath", record.getFilepath());
+           query.setParameter("arguments", record.getArguments());
+           List<?> results = query.list();
+
+           if (results.size() > 0) {
+               return false;
+           } else {
+               create(record);
+               sendSubscriptionNotifyMessage(String.valueOf(record
+                       .getIdentifier()));
+               return true;
+           }
+       } finally {
+           if (session != null) {
+               session.close();
+           }
        }
    }
 
@@ -172,7 +179,7 @@ public class SubscriptionDAO extends CoreDao {
        @Override
        public List<SubscriptionRecord> doInTransaction(
                TransactionStatus status) {
-           Criteria criteria = getSession().createCriteria(
+           Criteria criteria = getCurrentSession().createCriteria(
                    daoClass);
            return criteria.list();
        }
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.hibernate.Criteria;
+import org.hibernate.Session;
 
 import com.raytheon.uf.common.dataplugin.text.db.WatchWarn;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
@@ -44,6 +45,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
 * Oct 1, 2008 1538 jkorman Added additional functionality.
 * Aug 9, 2010 3944 cjeanbap Added method, queryAllWatchWarn.
 * May 20, 2014 2536 bclement moved from edex.textdb to edex.plugin.text
+* 10/28/2014 3454 bphillip Fix usage of getSession()
 * </pre>
 *
 * @author garmendariz
@@ -109,8 +111,15 @@ public class WatchWarnDao extends CoreDao {
    @SuppressWarnings("unchecked")
    public List<WatchWarn> queryAllWatchWarn() {
 
-       Criteria criteria = getSession().createCriteria(WatchWarn.class);
+       Session session = getSession();
+       try {
+           Criteria criteria = session.createCriteria(WatchWarn.class);
 
-       return criteria.list();
+           return criteria.list();
+       } finally {
+           if (session != null) {
+               session.close();
+           }
+       }
    }
 }
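All three DAO fixes above follow from the same Hibernate session contract as these patches treat it: a session obtained from getSession() (or SessionFactory.openSession()) is owned by the caller and must be closed, while getCurrentSession() hands back one bound to the surrounding Spring transaction and cleaned up automatically. A minimal sketch of the caller-owned case, assuming only a Hibernate SessionFactory; the entity name is illustrative:

    import java.util.List;

    import org.hibernate.Session;
    import org.hibernate.SessionFactory;

    public class SessionOwnershipSketch {

        private final SessionFactory sessionFactory;

        public SessionOwnershipSketch(SessionFactory sessionFactory) {
            this.sessionFactory = sessionFactory;
        }

        public List<?> queryAll() {
            // openSession(): the caller owns this Session and must close it,
            // which is exactly why the patches above add try/finally blocks.
            Session session = sessionFactory.openSession();
            try {
                return session.createQuery("from WatchWarn").list();
            } finally {
                // Null guard mirrors the defensive style used in the patch.
                if (session != null) {
                    session.close();
                }
            }
        }
    }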
@@ -102,13 +102,8 @@ script_="%{_baseline_workspace}/build/static/linux/cave/awips2VisualizeUtility.s
 
 # add the license information.
 license_dir="%{_baseline_workspace}/rpms/legal"
-cp "${license_dir}/license.txt" \
-   %{_build_root}/awips2/alertviz
-if [ $? -ne 0 ]; then
-   exit 1
-fi
 
-cp "${license_dir}/Master Rights File.pdf" \
+cp "${license_dir}/Master_Rights_File.pdf" \
    %{_build_root}/awips2/alertviz
 if [ $? -ne 0 ]; then
    exit 1
@@ -157,7 +152,6 @@ rm -rf ${RPM_BUILD_ROOT}
 %dir /awips2/alertviz/features
 /awips2/alertviz/features/*
 %doc /awips2/alertviz/*.pdf
-%doc /awips2/alertviz/license.txt
 %dir /awips2/alertviz/plugins
 /awips2/alertviz/plugins/*
@@ -51,9 +51,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
       %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
@@ -56,9 +56,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
       ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -52,16 +52,12 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
 
-   echo "\"/${COMPONENT_BUILD_DIR}/licenses/license.txt\"" \
-      >> %{_topdir}/BUILD/component-files.txt
-   echo "\"/${COMPONENT_BUILD_DIR}/licenses/Master Rights File.pdf\"" \
+   echo "\"/${COMPONENT_BUILD_DIR}/licenses/Master_Rights_File.pdf\"" \
      >> %{_topdir}/BUILD/component-files.txt
    echo "\"/${COMPONENT_BUILD_DIR}/licenses/FOSS_licenses.tar\"" \
      >> %{_topdir}/BUILD/component-files.txt
@@ -129,4 +125,4 @@ echo -e "\e[1;34m\| The AWIPS II IRT Installation Has Been Successfully Removed\
 echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
 echo ""
 
 %files -f component-files.txt
@@ -388,6 +388,8 @@ if [ ${_myHost} == "cpsbn1" -o ${_myHost} == "cpsbn2" ] ; then
    echo "NETMASK3=255.255.255.255" >> ${_route_eth1}
    echo "ADDRESS4=224.0.1.5" >> ${_route_eth1}
    echo "NETMASK4=255.255.255.255" >> ${_route_eth1}
+   echo "ADDRESS5=224.0.1.6" >> ${_route_eth1}
+   echo "NETMASK5=255.255.255.255" >> ${_route_eth1}
 
    # restart networking
    /sbin/service network restart
@@ -738,12 +738,19 @@ HDS ^(JUTX(([2-4]1)|53)) (.{4}) (..)(..)(..)
 NGRID ^(E[HS-V][A-DG-KST][B-T]01) (KWBW) (..)(..)(..)
     FILE -overwrite -log -close -edex /data_store/grib2/(\3:yyyy)(\3:mm)\3/\4/RTOFS/\1_\2_\3\4\5_(seq).grib2.%Y%m%d%H
 
-# ESTOFS
-# Pattern provided by Joshua.Watson.
-NGRID ^(E[EHC][IP][A-Z]88) (KWBM) (..)(..)(..)[^!]*!(grib|grib2)/[^/]*/([^/]*)/#([^/]*)/([0-9]{8})([0-9]{4})(F[0-9]{3})/([^/]*)
-    FILE -overwrite -log -close -edex /data_store/\6/(\3:yyyy)(\3:mm)\3/\4/\7/GRID\8/\(10)Z_\(11)_\(12)-\1_\2_\3\4\5_(seq).\6.%Y%m%d%H
+# ESTOFS - Pattern provided by Joshua.Watson.
+#NGRID ^(E[EHC][IP][A-Z]88) (KWBM) (..)(..)(..)[^!]*!(grib|grib2)/[^/]*/([^/]*)/#([^/]*)/([0-9]{8})([0-9]{4})(F[0-9]{3})/([^/]*)
+# FILE -overwrite -log -close -edex /data_store/\6/(\3:yyyy)(\3:mm)\3/\4/\7/GRID\8/\(10)Z_\(11)_\(12)-\1_\2_\3\4\5_(seq).\6.%Y%m%d%H
+
+# ESTOFS workaround until Unidata grib tables are updated
+NGRID ^(E[EHC][IP][A-Z]88) (KWBM) (..)(..)(..)
+    FILE -overwrite -log -close -edex /data_store/grib2/(\3:yyyy)(\3:mm)\3/\4/ESTOFS/GRID255/\1_\2_\3\4\5_(seq).grib2.%Y%m%d%H
 
-# HRRR
-# Pattern provided by Joshua.Watson.
-NGRID ^(Y.C[A-MZ][089][0-9]) (KWBY) (..)(..)(..)[^!]*!(grib|grib2)/[^/]*/([^/]*)/#([^/]*)/([0-9]{8})([0-9]{4})(F[0-9]{3})/([^/]*)
-    FILE -overwrite -log -close -edex /data_store/\6/(\3:yyyy)(\3:mm)\3/\4/\7/GRID\8/\(10)Z_\(11)_\(12)-\1_\2_\3\4\5_(seq).\6.%Y%m%d%H
+# HRRR - Pattern provided by Joshua.Watson.
+#NGRID ^(Y.C[A-MZ][05789][0-9]) (KWBY) (..)(..)(..)[^!]*!(grib|grib2)/[^/]*/([^/]*)/#([^/]*)/([0-9]{8})([0-9]{4})(F[0-9]{3})/([^/]*)
+# FILE -overwrite -log -close -edex /data_store/\6/(\3:yyyy)(\3:mm)\3/\4/\7/GRID\8/\(10)Z_\(11)_\(12)-\1_\2_\3\4\5_(seq).\6.%Y%m%d%H
+
+# HRRR Workaround until noaaport ingest grib tables are updated.
+NGRID ^(Y.C[A-MZ][05789][0-9]) (KWBY) (..)(..)(..)
+    FILE -overwrite -log -close -edex /data_store/grib2/(\3:yyyy)(\3:mm)\3/\4/HRRR/GRID184/\1_\2_\3\4\5_(seq).grib2.%Y%m%d%H
+#
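Each pqact entry above pairs an extended regular expression, matched against the product identifier, with a FILE action that refers back to the capture groups (\1, \2, ...). A quick way to see what the simplified ESTOFS pattern captures is to run it against a sample heading in plain Java; the heading string below is made up for illustration only:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class EstofsPatternCheck {
        public static void main(String[] args) {
            // Same regex as the new pqact entry: ^(E[EHC][IP][A-Z]88) (KWBM) (..)(..)(..)
            Pattern p = Pattern.compile("^(E[EHC][IP][A-Z]88) (KWBM) (..)(..)(..)");
            String sampleHeading = "EEIC88 KWBM 241200"; // made-up example heading
            Matcher m = p.matcher(sampleHeading);
            if (m.find()) {
                // group 1 = TTAAii, group 2 = originating center,
                // groups 3-5 = the DDHHMM timestamp fields of the heading
                System.out.println(m.group(1) + " " + m.group(2) + " "
                        + m.group(3) + "/" + m.group(4) + "/" + m.group(5));
            }
        }
    }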
@@ -1,4 +1,4 @@
-/data/ldm/logs/ldmd.log /data/ldm/logs/nwstg.log /data/ldm/logs/goes.log /data/ldm/logs/nwstg2.log /data/ldm/logs/oconus.log /data/ldm/logs/polarsat.log {
+/data/ldm/logs/ldmd.log /data/ldm/logs/nwstg.log /data/ldm/logs/goes_add.log /data/ldm/logs/nwstg2.log /data/ldm/logs/oconus.log /data/ldm/logs/polarsat.log {
     missingok
     compress
     notifempty
@@ -74,9 +74,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -100,9 +100,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -104,9 +104,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -99,9 +99,7 @@ function copyLegal()
 
    mkdir -p %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
 }
 pushd . > /dev/null
@@ -101,9 +101,7 @@ function copyLegal()
 
    mkdir -p %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      %{_build_root}/${COMPONENT_BUILD_DIR}/licenses
 }
 pushd . > /dev/null
@@ -64,9 +64,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -139,9 +139,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -52,9 +52,7 @@ function copyLegal()
    tar -cjf %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
       %{_baseline_workspace}/rpms/legal/FOSS_licenses/
 
-   cp %{_baseline_workspace}/rpms/legal/license.txt \
-      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
-   cp "%{_baseline_workspace}/rpms/legal/Master Rights File.pdf" \
+   cp "%{_baseline_workspace}/rpms/legal/Master_Rights_File.pdf" \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
    cp %{_baseline_workspace}/rpms/legal/FOSS_licenses.tar \
      ${RPM_BUILD_ROOT}/${COMPONENT_BUILD_DIR}/licenses
@@ -49,6 +49,22 @@ function buildQPID()
 
    pushd . > /dev/null 2>&1
 
+   # ensure that the destination rpm directories exist
+   if [ ! -d ${AWIPSII_TOP_DIR}/RPMS/noarch ]; then
+      mkdir -p ${AWIPSII_TOP_DIR}/RPMS/noarch
+      if [ $? -ne 0 ]; then
+         exit 1
+      fi
+   fi
+
+   # ensure that the destination rpm directories exist
+   if [ ! -d ${AWIPSII_TOP_DIR}/RPMS/x86_64 ]; then
+      mkdir -p ${AWIPSII_TOP_DIR}/RPMS/x86_64
+      if [ $? -ne 0 ]; then
+         exit 1
+      fi
+   fi
+
    cd ${WORKSPACE}/rpms/awips2.qpid/0.18/deploy.builder
    if [ $? -ne 0 ]; then
       echo "ERROR: Failed to build the qpid rpms."
@@ -61,14 +77,6 @@ function buildQPID()
       return 1
    fi
 
-   # ensure that the destination rpm directories exist
-   if [ ! -d ${AWIPSII_TOP_DIR}/RPMS/noarch ]; then
-      mkdir -p ${AWIPSII_TOP_DIR}/RPMS/noarch
-      if [ $? -ne 0 ]; then
-         exit 1
-      fi
-   fi
-
    # Copy the 0.18 qpid rpms
    cd ${WORKSPACE}/rpms/awips2.qpid/0.18/RPMS/noarch
    if [ $? -ne 0 ]; then
@@ -90,6 +98,11 @@ function buildQPID()
      return 1
    fi
 
+   #build 0.28
+   export AWIPS_II_TOP_DIR
+   cd ${WORKSPACE}/installers/RPMs/qpid-java-broker-0.28
+   /bin/bash build.sh
+
    popd > /dev/null 2>&1
 
    return 0
48  rpms/legal/FOSS_licenses/Academic_Free_License_2.1.txt  (Normal file)
@@ -0,0 +1,48 @@
The Academic Free License
v. 2.1

This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following notice immediately following the copyright notice for the Original Work:

Licensed under the Academic Free License version 2.1

1) Grant of Copyright License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license to do the following:

a) to reproduce the Original Work in copies;

b) to prepare derivative works ("Derivative Works") based upon the Original Work;

c) to distribute copies of the Original Work and Derivative Works to the public;

d) to perform the Original Work publicly; and

e) to display the Original Work publicly.

2) Grant of Patent License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, to make, use, sell and offer for sale the Original Work and Derivative Works.

3) Grant of Source Code License. The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor hereby agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work, and by publishing the address of that information repository in a notice immediately following the copyright notice that applies to the Original Work.

4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior written permission of the Licensor. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor except as expressly stated herein. No patent license is granted to make, use, sell or offer to sell embodiments of any patent claims other than the licensed claims defined in Section 2. No right is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any Original Work that Licensor otherwise would have a right to license.

5) This section intentionally omitted.

6) Attribution Rights. You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work.

7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately proceeding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to Original Work is granted hereunder except under this disclaimer.

8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to any person for any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. This limitation of liability shall not apply to liability for death or personal injury resulting from Licensor's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You.

9) Acceptance and Termination. If You distribute copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. Nothing else but this License (or another written agreement between Licensor and You) grants You permission to create Derivative Works based upon the Original Work or to exercise any of the rights granted in Section 1 herein, and any attempt to do so except under the terms of this License (or another written agreement between Licensor and You) is expressly prohibited by U.S. copyright law, the equivalent laws of other countries, and by international treaty. Therefore, by exercising any of the rights granted to You in Section 1 herein, You indicate Your acceptance of this License and all of its terms and conditions.

10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware.

11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et seq., the equivalent laws of other countries, and international treaty. This section shall survive the termination of this License.

12) Attorneys Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License.

13) Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable.

14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You.

This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved. Permission is hereby granted to copy and distribute this license without modification. This license may not be modified without the express written permission of its copyright owner.
8  rpms/legal/FOSS_licenses/Aleksei_Valikov_c_2006-2009.txt  (Normal file)
@@ -0,0 +1,8 @@
Copyright (c) 2006-2009, Aleksei Valikov
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
• Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
• Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
• Neither the name of Alexey Valikov nor the name of Highsource nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
17  rpms/legal/FOSS_licenses/h5py/LICENSE.txt → rpms/legal/FOSS_licenses/Andrew_Collette_c_2008.txt  (Executable file → Normal file)
@@ -4,21 +4,26 @@ Copyright (c) 2008 Andrew Collette
 http://h5py.alfven.org
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
 met:
 
 a. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
 
 b. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the
    distribution.
 
 c. Neither the name of the author nor the names of contributors may
    be used to endorse or promote products derived from this software
    without specific prior written permission.
 
 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
13  rpms/legal/FOSS_licenses/Antler_3_License.txt  (Normal file)
@@ -0,0 +1,13 @@
ANTLR 3 License

[The BSD License]
Copyright (c) 2010 Terence Parr
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
0  rpms/legal/FOSS_licenses/apache/Apache1.1.txt → rpms/legal/FOSS_licenses/Apache_1.1.txt  (Executable file → Normal file)
Some files were not shown because too many files have changed in this diff.