Merge branch 'ncep_14.2.1-n' into ncep_14.2.2-n
Former-commit-id: bb9d85077b
commit 91b738b913
751 changed files with 147969 additions and 113282 deletions
@@ -218,9 +218,14 @@
     <taskdef resource="net/sf/antcontrib/antlib.xml"
              classpath="${builder}/../lib/ant/ant-contrib-1.0b3.jar" />

-    <taskdef name="xmltask"
-             classpath="${builder}/../lib/ant/xmltask-v1.15.1.jar"
-             classname="com.oopsconsultancy.xmltask.ant.XmlTask" />
+    <taskdef name="xmltask"
+             classname="com.oopsconsultancy.xmltask.ant.XmlTask">
+        <classpath>
+            <pathelement path="${builder}/../lib/ant/xmltask-v1.15.1.jar"/>
+            <pathelement path="${builder}/../lib/ant/xalan-2.7.1.jar"/>
+        </classpath>
+    </taskdef>

     <!-- Generate the 32-bit CAVE ini files -->
     <var name="cave.arch" value="arch.x86" />
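Note on the change above: the xmltask taskdef switches from a single classpath attribute to a nested <classpath> element so that the new xalan-2.7.1.jar (added as a binary below) is also on the task's classpath, presumably because the XSLT work performed by xmltask needs Xalan available at build time.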
BIN    cave/build/lib/ant/xalan-2.7.1.jar    Normal file (binary file not shown)
|
@ -189,7 +189,7 @@
|
|||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="gov.noaa.nws.ncep.common.dataplugin.ntrans"
|
||||
|
@ -212,4 +212,11 @@
|
|||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.common.remote.script"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
</feature>
|
||||
|
|
|
@ -315,7 +315,14 @@
|
|||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="gov.noaa.nws.ncep.viz.timeseries"
|
||||
id="gov.noaa.nws.ncep.viz.ui.remotescript"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="gov.noaa.nws.ncep.viz.rsc.timeseries"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
|
|
158    deltaScripts/14.2.1-n3/addNcepNwxAdminMessageGpTable.sql    Normal file
|
@ -0,0 +1,158 @@
|
|||
-- create nwx.adminmessages table
|
||||
DROP TABLE IF EXISTS nwx.adminmessages CASCADE;
|
||||
CREATE TABLE nwx.adminmessages(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productname varchar(60) NOT NULL,
|
||||
producttablename varchar(30) NOT NULL,
|
||||
producttype varchar(20) NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.cmcam table
|
||||
DROP TABLE IF EXISTS nwx.cmcam CASCADE;
|
||||
CREATE TABLE nwx.cmcam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.ncfam table
|
||||
DROP TABLE IF EXISTS nwx.ncfam CASCADE;
|
||||
CREATE TABLE nwx.ncfam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nesdisam table
|
||||
DROP TABLE IF EXISTS nwx.nesdisam CASCADE;
|
||||
CREATE TABLE nwx.nesdisam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nesdispm table
|
||||
DROP TABLE IF EXISTS nwx.nesdispm CASCADE;
|
||||
CREATE TABLE nwx.nesdispm(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nwstgam table
|
||||
DROP TABLE IF EXISTS nwx.nwstgam CASCADE;
|
||||
CREATE TABLE nwx.nwstgam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- Drop nwx.sdm table
|
||||
DROP TABLE IF EXISTS nwx.sdm CASCADE;
|
||||
|
||||
-- create nwx.sdmam table
|
||||
DROP TABLE IF EXISTS nwx.sdmam CASCADE;
|
||||
CREATE TABLE nwx.sdmam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdmim table
|
||||
DROP TABLE IF EXISTS nwx.sdmim CASCADE;
|
||||
CREATE TABLE nwx.sdmim(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdmdhm table
|
||||
DROP TABLE IF EXISTS nwx.sdmdhm CASCADE;
|
||||
CREATE TABLE nwx.sdmdhm(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO nwx.datatypegrouplist (datatypegroupname,datatypegrouptablename) values ('Admin Messages','nwx.adminmessages');
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('SDM Administrative Messages','nwx.sdmam','sdmam');
|
||||
INSERT INTO nwx.sdmam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS42','KWNO','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('SDM International Messages','nwx.sdmim','sdmim');
|
||||
INSERT INTO nwx.sdmim (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NPXX10','KWNO','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('SDM DHS Hazards Messages','nwx.sdmdhm','sdmdhm');
|
||||
INSERT INTO nwx.sdmdhm (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS71','KWNO','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('CMC Administrative Messages','nwx.cmcam','cmcam');
|
||||
INSERT INTO nwx.cmcam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOCN05', 'CWAO','MONTREAL_VAAC','CN','CN',45.47,-73.75,-9999);
|
||||
INSERT INTO nwx.cmcam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('AACN01', 'CWAO','MONTREAL_VAAC','CN','CN',45.47,-73.75,-9999);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NWSTG Administrative Messages','nwx.nwstgam','nwstgam');
|
||||
INSERT INTO nwx.nwstgam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOXX01', 'KWBC','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NCF Administrative Messages','nwx.ncfam','ncfam');
|
||||
INSERT INTO nwx.ncfam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS72', 'KNCF','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NESDIS Product Anomaly Messages','nwx.nesdispm','nesdispm');
|
||||
INSERT INTO nwx.nesdispm (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS71', 'KNES','NESDIS','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NESDIS Administrative Messages','nwx.nesdisam','nesdisam');
|
||||
INSERT INTO nwx.nesdisam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS72', 'KNES','NESDIS','MD','US',38.82,-76.87,86);
|
||||
|
||||
DELETE from nwx.hpcproducts where productname='SDM Messages';
|
||||
DELETE from nwx.hpcproducts where productname='International Messages';
|
||||
DROP TABLE nwx.sdm;
|
||||
DROP TABLE nwx.intl;
|
||||
|
||||
\connect metadata awips
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (181,'sdmdhm','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (182,'cmcam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (183,'nwstgam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (184,'ncfam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (185,'nesdispm','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (186,'nesdisam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (185,'sdmam','B');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (186,'sdmim','W');
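A quick sanity check after the delta script runs is to query the tables it just populated. This is a hypothetical verification snippet, assuming the same psql path, databases, and role used by the script itself:

    #!/bin/bash
    # Hypothetical post-upgrade check; path, databases and user follow the delta script above.
    PSQL=/awips2/psql/bin/psql
    # New product groups registered for the NWX Admin Messages group (ncep database)
    ${PSQL} -U awips -d ncep -c "SELECT productname, producttablename, producttype FROM nwx.adminmessages ORDER BY id;"
    # New nctext input file types (metadata database, per the \connect above);
    # note: ids 185 and 186 each appear twice in the inserts above, which may be worth double-checking.
    ${PSQL} -U awips -d metadata -c "SELECT * FROM awips.nctext_inputfile_type ORDER BY 1;"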
|
||||
|
5    deltaScripts/14.2.1-n3/updateNcepNwxAdminTables.sh    Normal file
|
@@ -0,0 +1,5 @@
#!/bin/bash

PSQL=/awips2/psql/bin/psql
DIR=`dirname $0`
${PSQL} -U awips -d ncep -f ${DIR}/addNcepNwxAdminMessageGpTable.sql
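For reference, a minimal sketch of how this delta script would typically be invoked on the database host (assumptions: bash is available and the awips role can connect to PostgreSQL without a password prompt):

    # Hypothetical invocation; the script resolves its SQL file relative to its own location via `dirname $0`.
    chmod +x deltaScripts/14.2.1-n3/updateNcepNwxAdminTables.sh
    deltaScripts/14.2.1-n3/updateNcepNwxAdminTables.sh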
@ -114,6 +114,10 @@
|
|||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.dataprovideragent.feature" />
|
||||
</antcall>
|
||||
<antcall target="build">
|
||||
<param name="feature"
|
||||
value="com.raytheon.uf.edex.remote.script.feature" />
|
||||
</antcall>
|
||||
|
||||
<!-- SPECIAL CASE -->
|
||||
<if>
|
||||
|
|
|
@ -0,0 +1,157 @@
|
|||
-- create nwx.adminmessages table
|
||||
DROP TABLE IF EXISTS nwx.adminmessages CASCADE;
|
||||
CREATE TABLE nwx.adminmessages(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productname varchar(60) NOT NULL,
|
||||
producttablename varchar(30) NOT NULL,
|
||||
producttype varchar(20) NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.cmcam table
|
||||
DROP TABLE IF EXISTS nwx.cmcam CASCADE;
|
||||
CREATE TABLE nwx.cmcam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.ncfam table
|
||||
DROP TABLE IF EXISTS nwx.ncfam CASCADE;
|
||||
CREATE TABLE nwx.ncfam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nesdisam table
|
||||
DROP TABLE IF EXISTS nwx.nesdisam CASCADE;
|
||||
CREATE TABLE nwx.nesdisam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nesdispm table
|
||||
DROP TABLE IF EXISTS nwx.nesdispm CASCADE;
|
||||
CREATE TABLE nwx.nesdispm(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nwstgam table
|
||||
DROP TABLE IF EXISTS nwx.nwstgam CASCADE;
|
||||
CREATE TABLE nwx.nwstgam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- Drop nwx.sdm table
|
||||
DROP TABLE IF EXISTS nwx.sdm CASCADE;
|
||||
|
||||
-- create nwx.sdmam table
|
||||
DROP TABLE IF EXISTS nwx.sdmam CASCADE;
|
||||
CREATE TABLE nwx.sdmam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdmim table
|
||||
DROP TABLE IF EXISTS nwx.sdmim CASCADE;
|
||||
CREATE TABLE nwx.sdmim(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdmdhm table
|
||||
DROP TABLE IF EXISTS nwx.sdmdhm CASCADE;
|
||||
CREATE TABLE nwx.sdmdhm(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO nwx.datatypegrouplist (datatypegroupname,datatypegrouptablename) values ('Admin Messages','nwx.adminmessages');
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('SDM Administrative Messages','nwx.sdmam','sdmam');
|
||||
INSERT INTO nwx.sdmam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS42','KWNO','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('SDM International Messages','nwx.sdmim','sdmim');
|
||||
INSERT INTO nwx.sdmim (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NPXX10','KWNO','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('SDM DHS Hazards Messages','nwx.sdmdhm','sdmdhm');
|
||||
INSERT INTO nwx.sdmdhm (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS71','KWNO','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('CMC Administrative Messages','nwx.cmcam','cmcam');
|
||||
INSERT INTO nwx.cmcam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOCN05', 'CWAO','MONTREAL_VAAC','CN','CN',45.47,-73.75,-9999);
|
||||
INSERT INTO nwx.cmcam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('AACN01', 'CWAO','MONTREAL_VAAC','CN','CN',45.47,-73.75,-9999);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NWSTG Administrative Messages','nwx.nwstgam','nwstgam');
|
||||
INSERT INTO nwx.nwstgam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOXX01', 'KWBC','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NCF Administrative Messages','nwx.ncfam','ncfam');
|
||||
INSERT INTO nwx.ncfam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS72', 'KNCF','NMC','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NESDIS Product Anomaly Messages','nwx.nesdispm','nesdispm');
|
||||
INSERT INTO nwx.nesdispm (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS71', 'KNES','NESDIS','MD','US',38.82,-76.87,86);
|
||||
INSERT INTO nwx.adminmessages(productname,producttablename,producttype) values ('NESDIS Administrative Messages','nwx.nesdisam','nesdisam');
|
||||
INSERT INTO nwx.nesdisam (productid,stnid,stnname,state,country,latitude,longitude,elevation) values ('NOUS72', 'KNES','NESDIS','MD','US',38.82,-76.87,86);
|
||||
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (181,'sdmdhm','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (182,'cmcam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (183,'nwstgam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (184,'ncfam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (185,'nesdispm','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (186,'nesdisam','W');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (185,'sdmam','B');
|
||||
INSERT INTO awips.nctext_inputfile_type VALUES (186,'sdmim','W');
|
||||
|
||||
DELETE from nwx.hpcproducts where productname='SDM Messages';
|
||||
DELETE from nwx.hpcproducts where productname='International Messages';
|
||||
DROP TABLE nwx.sdm;
|
||||
DROP TABLE nwx.intl;
|
||||
|
|
@ -195,6 +195,15 @@ producttablename varchar(30) NOT NULL,
|
|||
producttype varchar(20) NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.adminmessages table
|
||||
DROP TABLE IF EXISTS nwx.adminmessages CASCADE;
|
||||
CREATE TABLE nwx.adminmessages(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productname varchar(60) NOT NULL,
|
||||
producttablename varchar(30) NOT NULL,
|
||||
producttype varchar(20) NOT NULL
|
||||
);
|
||||
|
||||
-------------------------------------------------
|
||||
-- create nwx bulletin tables
|
||||
-- ---------------------------------------------
|
||||
|
@ -355,6 +364,20 @@ id SERIAL PRIMARY KEY,
|
|||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.cmcam table
|
||||
DROP TABLE IF EXISTS nwx.cmcam CASCADE;
|
||||
CREATE TABLE nwx.cmcam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
--create nwx.consig table
|
||||
DROP TABLE IF EXISTS nwx.consig CASCADE;
|
||||
CREATE TABLE nwx.consig(
|
||||
|
@ -1016,6 +1039,48 @@ id SERIAL PRIMARY KEY,
|
|||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.ncfam table
|
||||
DROP TABLE IF EXISTS nwx.ncfam CASCADE;
|
||||
CREATE TABLE nwx.ncfam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nesdisam table
|
||||
DROP TABLE IF EXISTS nwx.nesdisam CASCADE;
|
||||
CREATE TABLE nwx.nesdisam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nesdispam table
|
||||
DROP TABLE IF EXISTS nwx.nesdispam CASCADE;
|
||||
CREATE TABLE nwx.nesdispam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.ngmgd table
|
||||
DROP TABLE IF EXISTS nwx.ngmgd CASCADE;
|
||||
CREATE TABLE nwx.ngmgd(
|
||||
|
@ -1058,6 +1123,20 @@ id SERIAL PRIMARY KEY,
|
|||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.nwstgam table
|
||||
DROP TABLE IF EXISTS nwx.nwstgam CASCADE;
|
||||
CREATE TABLE nwx.nwstgam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.off table
|
||||
DROP TABLE IF EXISTS nwx.off CASCADE;
|
||||
CREATE TABLE nwx.off(
|
||||
|
@ -1400,9 +1479,37 @@ id SERIAL PRIMARY KEY,
|
|||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdm table
|
||||
DROP TABLE IF EXISTS nwx.sdm CASCADE;
|
||||
CREATE TABLE nwx.sdm(
|
||||
-- create nwx.sdmam table
|
||||
DROP TABLE IF EXISTS nwx.sdmam CASCADE;
|
||||
CREATE TABLE nwx.sdmam(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdmim table
|
||||
DROP TABLE IF EXISTS nwx.sdmim CASCADE;
|
||||
CREATE TABLE nwx.sdmim(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
stnname varchar(32) NOT NULL,
|
||||
state varchar(2) NOT NULL,
|
||||
country varchar(2) NOT NULL,
|
||||
latitude double precision NOT NULL,
|
||||
longitude double precision NOT NULL,
|
||||
elevation int NOT NULL
|
||||
);
|
||||
|
||||
-- create nwx.sdmdhm table
|
||||
DROP TABLE IF EXISTS nwx.sdmdhm CASCADE;
|
||||
CREATE TABLE nwx.sdmdhm(
|
||||
id SERIAL PRIMARY KEY,
|
||||
productid varchar(6) NOT NULL,
|
||||
stnid varchar(8) NOT NULL,
|
||||
|
|
|
@ -25,3 +25,4 @@ ${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxReconCARCAHBulletinTable
|
|||
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxSPCProductsBulletinTables.sql >> ${5} 2>&1
|
||||
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxTropicalPacificBulletinTables.sql >> ${5} 2>&1
|
||||
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxVolcanoProductsBulletinTables.sql >> ${5} 2>&1
|
||||
${1}/bin/psql -d ncep -U ${3} -q -p ${2} -f ${4}/loadNwxAdminMessagesBulletinTables.sql >> ${5} 2>&1
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
-- load nwx.sdmam table
|
||||
copy nwx.sdmam (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOUS42,KWNO,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.sdmim table
|
||||
copy nwx.sdmim (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NPXX10,KWNO,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.sdmdhm table
|
||||
copy nwx.sdmdhm (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOUS71,KWNO,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.nwstgam table
|
||||
copy nwx.nwstgam (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOXX01,KWBC,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.ncfam table
|
||||
copy nwx.ncfam (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOUS72,KNCF,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.nesdispm table
|
||||
copy nwx.nesdispm (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOUS71,KNES,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.nesdisam table
|
||||
copy nwx.nesdisam (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOUS72,KNES,NMC,MD,US,38.82,-76.87,86
|
||||
\.
|
||||
|
||||
-- load nwx.cmcam table
|
||||
copy nwx.cmcam (productid,stnid,stnname,state,country,latitude,longitude,elevation) FROM stdin with delimiter as ',';
|
||||
NOCN05,CWAO,MONTREAL_VAAC,CN,CN,45.47,-73.75,-9999
|
||||
AACN01,CWAO,MONTREAL_VAAC,CN,CN,45.47,-73.75,-9999
|
||||
\.
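This load file is wired into the bulletin-table load sequence by the psql line added to the load script earlier in this commit. For a standalone reload, a hypothetical call following the same argument convention (install path, port, user, and log file are placeholders):

    /awips2/psql/bin/psql -d ncep -U awips -q -p 5432 -f loadNwxAdminMessagesBulletinTables.sql >> loadNwx.log 2>&1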
|
|
@ -64,6 +64,19 @@ HPC Heat Index,nwx.hpcheatindex
|
|||
CPC Products,nwx.cpcproducts
|
||||
Volcano Products,nwx.volcanoproducts
|
||||
Fire Weather Products,nwx.fireweatherproducts
|
||||
Admin Messages,nwx.adminmessages
|
||||
\.
|
||||
|
||||
--load nwx.adminmessages table
|
||||
copy nwx.adminmessages (productname,producttablename,producttype) FROM stdin with delimiter as ',' ;
|
||||
SDM Administrative Messages,nwx.sdmam,sdm
|
||||
SDM International Messages,nwx.sdmim,intl
|
||||
SDM DHS Hazards Messages,nwx.sdmdhm
|
||||
CMC Administrative Messages,nwx.cmcam
|
||||
NWSTG Administrative Messages,nwx.nwstgam
|
||||
NCF Administrative Messages,nwx.ncfam
|
||||
NESDIS Product Anomaly Messages,nwx.nesdispam
|
||||
NESDIS Administrative Messages,nwx.nesdisam
|
||||
\.
|
||||
|
||||
--load nwx.observeddataproducts table
|
||||
|
@ -135,8 +148,6 @@ Hawaii Discussion,nwx.pmdhi,PMDHI
|
|||
Alaska Discussion,nwx.pmdak,PMDAK
|
||||
S Amer Discussion,nwx.pmdsa,PMDSA
|
||||
Caribbean Discussion,nwx.pmdca,PMDCA
|
||||
SDM Messages,nwx.sdm,sdm
|
||||
International Messages,nwx.intl,intl
|
||||
Storm Summaries,nwx.stations,storm
|
||||
Model Diag Discussion,nwx.pmdhmd,PMDHMD
|
||||
Selected Cities,nwx.scs,SCS
|
||||
|
|
|
@ -8658,7 +8658,7 @@ copy nwx.sfstns (productid,stnid,stnname,state,country,latitude,longitude,elevat
|
|||
085010;LPFL;FLORES_ISLAND;--;AZ;39.45;-31.13;29
|
||||
999999;M97;TUNICA;MS;US;34.86;-90.35;59
|
||||
999999;MDD;MIDLAND;TX;US;32.04;-102.10;854
|
||||
890090;NZSP;AMUNDSEN-SCOTT_SOUTH_POLE_STN;--;NZ;-90.00;0.00;2830
|
||||
890090;NZSP;AMUNDSEN-SCOTT_SOUTH_POLE_STN;--;NZ;-89.99;0.01;2830
|
||||
999999;PCZ;WAUPACA;WI;US;44.33;-89.01;252
|
||||
999999;RCX;LADYSMITH;WI;US;45.50;-91.00;377
|
||||
800630;SKMR;MONTERIA/LOS_GARZON;--;CO;8.82;-75.85;26
|
||||
|
@ -10043,7 +10043,7 @@ copy nwx.snstns (productid,stnid,stnname,state,country,latitude,longitude,elevat
|
|||
87860;SAVC;COMODORO_RIVADAVIA_AERO;--;AG;-45.78;-67.50;46
|
||||
88889;EGYP;MOUNT_PLEASANT_AIRPORT;--;FK;-51.81;-58.45;73
|
||||
89002;--;NEUMAYER;--;AA;-70.66;-8.25;50
|
||||
89009;--;AMUNDSEN-SCOTT;--;AA;-90.00;0.00;2835
|
||||
89009;--;AMUNDSEN-SCOTT;--;AA;-89.99;0.01;2835
|
||||
89022;--;HALLEY;--;AA;-75.50;-26.65;30
|
||||
89050;--;BELLINGSHAUSEN_AWS;--;AA;-62.20;-58.93;14
|
||||
89055;--;BASE_MARAMBIO;--;AA;-64.23;-56.71;198
|
||||
|
|
File diff suppressed because it is too large.
|
@ -32,7 +32,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
31,HAR,126,HARRISBURG,PA,US,40.23,-77.02
|
||||
32,DCA,306,WASHINGTON,DC,US,38.86,-77.04
|
||||
33,RIC,229,RICHMOND,VA,US,37.5,-77.32
|
||||
34,CSN,299,CASSANOVA,VA,US,38.64,-77.87
|
||||
34,CSN,299,CASANOVA,VA,US,38.64,-77.87
|
||||
35,ILM,135,WILMINGTON,NC,US,34.35,-77.87
|
||||
36,SLT,252,SLATE_RUN,PA,US,41.51,-77.97
|
||||
37,PSB,368,PHILLIPSBURG,PA,US,40.92,-77.99
|
||||
|
@ -50,7 +50,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
49,EWC,326,ELLWOOD_CITY,PA,US,40.83,-80.21
|
||||
50,ERI,92,ERIE,PA,US,42.02,-80.3
|
||||
51,MIA,176,MIAMI,FL,US,25.8,-80.3
|
||||
52,VRB,276,VERO_BEACH,FL,US,27.68,-80.49
|
||||
52,TRV,276,TREASURE,FL,US,27.68,-80.49
|
||||
53,PSK,369,DUBLIN,VA,US,37.09,-80.71
|
||||
54,AIR,280,BELLAIRE,OH,US,40.02,-80.82
|
||||
55,CLT,59,CHARLOTTE,NC,US,35.22,-80.93
|
||||
|
@ -62,7 +62,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
61,ORL,204,ORLANDO,FL,US,28.54,-81.34
|
||||
62,CRG,298,JACKSONVILLE,FL,US,30.34,-81.51
|
||||
63,EYW,96,KEY_WEST,FL,US,24.59,-81.8
|
||||
64,FMY,104,FT_MEYERS,FL,US,26.58,-81.87
|
||||
64,RSW,104,LEE_COUNTY,FL,US,26.53,-81.78
|
||||
65,SPA,380,SPARTANBURG,SC,US,35.03,-81.93
|
||||
66,HNN,339,HENDERSON,WV,US,38.75,-82.03
|
||||
67,HMV,337,HOLSTON_MOUNTAIN,TN,US,36.44,-82.13
|
||||
|
@ -83,7 +83,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
82,ROD,373,ROSEWOOD,OH,US,40.29,-84.04
|
||||
83,MBS,168,SAGINAW,MI,US,43.53,-84.08
|
||||
84,LOZ,160,LONDON,KY,US,37.03,-84.12
|
||||
85,ABY,4,ALBANY,GA,US,31.65,-84.3
|
||||
85,PZD,4,PECAN,GA,US,31.66,-84.29
|
||||
86,SSM,255,SAULT_STE_MARIE,MI,US,46.41,-84.31
|
||||
87,TLH,264,TALLAHASSEE,FL,US,30.56,-84.37
|
||||
88,ATL,19,ATLANTA,GA,US,33.63,-84.44
|
||||
|
@ -93,7 +93,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
92,LGC,350,LA_GRANGE,GA,US,33.05,-85.21
|
||||
93,GRR,332,GRAND_RAPIDS,MI,US,42.79,-85.5
|
||||
94,TVC,270,TRAVERSE_CITY,MI,US,44.67,-85.55
|
||||
95,LOU,159,LOUISVILLE,KY,US,38.1,-85.58
|
||||
95,IIU,159,LOUISVILLE,KY,US,38.1,-85.58
|
||||
96,MKG,179,MUSKEGON,MI,US,43.17,-86.04
|
||||
97,PMM,366,PULLMAN,MI,US,42.47,-86.11
|
||||
98,GIJ,330,NILES,MI,US,41.77,-86.32
|
||||
|
@ -115,7 +115,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
114,SJI,378,SEMMNES,AL,US,30.73,-88.36
|
||||
115,IGB,133,BIGBEE,MS,US,33.48,-88.52
|
||||
116,MEI,172,MERIDIAN,MS,US,32.38,-88.8
|
||||
117,DEC,70,DECATUR,IL,US,39.74,-88.86
|
||||
117,AXC,70,ADDERS,IL,US,39.74,-88.86
|
||||
118,YQT,393,THUNDER_BAY,ON,CN,48.37,-89.32
|
||||
119,DYR,83,DYERSBURG,TN,US,36.02,-89.32
|
||||
120,RHI,228,RHINELANDER,WI,US,45.63,-89.45
|
||||
|
@ -123,8 +123,8 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
122,DLL,310,DELLS,WI,US,43.55,-89.76
|
||||
123,MEM,173,MEMPHIS,TN,US,35.06,-89.98
|
||||
124,LEV,349,GRAND_ISLE,LA,US,29.18,-90.1
|
||||
125,JAN,142,JACKSON,MS,US,32.51,-90.17
|
||||
126,MSY,195,NEW_ORLEANS,LA,US,30,-90.27
|
||||
125,MHZ,142,MAGNOLIA,MS,US,32.43,-90.1
|
||||
126,HRV,195,HARVEY,LA,US,29.85,-90
|
||||
127,FAM,97,FARMINGTON,MO,US,37.67,-90.23
|
||||
128,MCB,169,MC_COMB,MS,US,31.3,-90.26
|
||||
129,SQS,381,SIDON,MS,US,33.46,-90.28
|
||||
|
@ -156,7 +156,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
155,FSM,108,FT_SMITH,AR,US,35.38,-94.27
|
||||
156,FOD,105,FT_DODGE,IA,US,42.61,-94.29
|
||||
157,BUM,45,BUTLER,MO,US,38.27,-94.49
|
||||
158,MKC,177,KANSAS_CITY,MO,US,39.28,-94.59
|
||||
158,MCI,177,KANSAS_CITY,MO,US,39.29,-94.74
|
||||
159,LFK,155,LUFKIN,TX,US,31.16,-94.72
|
||||
160,GGG,115,LONGVIEW,TX,US,32.42,-94.75
|
||||
161,BJI,33,BEMIDJI,MN,US,47.58,-95.02
|
||||
|
@ -170,7 +170,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
169,PSX,219,PALACIOS,TX,US,28.76,-96.31
|
||||
170,FSD,107,SIOUX_FALLS,SD,US,43.65,-96.78
|
||||
171,FAR,98,FARGO,ND,US,46.75,-96.85
|
||||
172,DFW,72,DALLAS-FT_WORTH,TX,US,32.87,-97.03
|
||||
172,TTT,72,MAVERICK,TX,US,32.87,-97.04
|
||||
173,ADM,8,ARDMORE,OK,US,34.21,-97.17
|
||||
174,GFK,114,GRAND_FORKS,ND,US,47.95,-97.19
|
||||
175,YWG,397,WINNIPEG,MB,CN,49.9,-97.23
|
||||
|
@ -180,7 +180,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
179,ICT,132,WICHITA,KS,US,37.75,-97.58
|
||||
180,OKC,198,OKLAHOMA_CITY,OK,US,35.36,-97.61
|
||||
181,SLN,251,SALINA,KS,US,38.93,-97.62
|
||||
182,AUS,20,AUSTIN,TX,US,30.3,-97.7
|
||||
182,CWK,20,CENTEX,TX,US,30.38,-97.53
|
||||
183,END,321,VANCE_AFB,OK,US,36.35,-97.92
|
||||
184,OBH,358,WOLBACH,NE,US,41.38,-98.35
|
||||
185,ABR,3,ABERDEEN,SD,US,45.42,-98.37
|
||||
|
@ -190,7 +190,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
189,LRD,161,LAREDO,TX,US,27.48,-99.42
|
||||
190,JCT,144,JUNCTION,TX,US,30.6,-99.82
|
||||
191,ABI,1,ABILENE,TX,US,32.48,-99.86
|
||||
192,GAG,110,GAGE,OK,US,36.34,-99.88
|
||||
192,MMB,110,MITBEE,OK,US,36.34,-99.88
|
||||
193,ANW,282,AINSWORTH,NE,US,42.57,-99.99
|
||||
194,PIR,214,PIERRE,SD,US,44.4,-100.17
|
||||
195,HLC,335,HILL_CITY,KS,US,39.26,-100.23
|
||||
|
@ -222,11 +222,11 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
221,ISN,140,WILLISTON,ND,US,48.18,-103.63
|
||||
222,MRF,190,MARFA,TX,US,30.3,-103.95
|
||||
223,PUB,220,PUEBLO,CO,US,38.29,-104.43
|
||||
224,ROW,233,ROSWELL,NM,US,33.34,-104.62
|
||||
224,CME,233,CHISUM,NM,US,33.34,-104.62
|
||||
225,DEN,71,DENVER,CO,US,39.81,-104.66
|
||||
226,CYS,301,CHEYENNE,WY,US,41.21,-104.77
|
||||
227,CIM,297,CIMARRON,NM,US,36.49,-104.87
|
||||
228,LVS,163,LAS_VEGAS,NM,US,35.66,-105.14
|
||||
228,FTI,163,FT_UNION,NM,US,35.66,-105.14
|
||||
229,LAR,148,LARAMIE,WY,US,41.33,-105.72
|
||||
230,ALS,13,ALAMOSA,CO,US,37.35,-105.82
|
||||
231,MLS,182,MILES_CITY,MT,US,46.38,-105.95
|
||||
|
@ -242,7 +242,7 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
241,CHE,54,HAYDEN,CO,US,40.52,-107.31
|
||||
242,DMN,76,DEMING,NM,US,32.28,-107.6
|
||||
243,YYN,400,SWIFT_CURRENT,SA,CN,50.28,-107.68
|
||||
244,FMN,103,FARMINGTON,NM,US,36.75,-108.1
|
||||
244,RSK,103,RATTLESNAKE,NM,US,36.75,-108.1
|
||||
245,BOY,290,BOYSEN_RESV.,WY,US,43.46,-108.3
|
||||
246,BIL,31,BILLINGS,MT,US,45.81,-108.63
|
||||
247,JNC,347,GRAND_JUNCTION,CO,US,39.06,-108.79
|
||||
|
@ -312,8 +312,8 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
311,OAK,356,OAKLAND,CA,US,37.73,-122.22
|
||||
312,RBL,225,RED_BLUFF,CA,US,40.1,-122.24
|
||||
313,SEA,243,SEATTLE,WA,US,47.44,-122.31
|
||||
314,BLI,35,BELLINGHAM,WA,US,48.95,-122.58
|
||||
315,PDX,208,PORTLAND,OR,US,45.58,-122.6
|
||||
314,HUH,35,WHATCOM,WA,US,48.95,-122.58
|
||||
315,BTG,208,BATTLE_GROUND,WA,US,45.75,-122.59
|
||||
316,PYE,371,POINT_REYES,CA,US,38.08,-122.87
|
||||
317,OED,362,MEDFORD,OR,US,42.48,-122.91
|
||||
318,EUG,93,EUGENE,OR,US,44.12,-123.22
|
||||
|
@ -388,3 +388,4 @@ COPY stns.VORS(PKEY, STATION_ID, STATION_NUM, NAME, STATE, COUNTRY, LATITUDE, LO
|
|||
387,LIH,0,LIHUE,HI,US,21.97,-159.34
|
||||
388,SOK,0,SOUTH_KAUAI,HI,US,21.9,-159.53
|
||||
\.
|
||||
|
||||
|
|
|
@ -136,4 +136,8 @@
|
|||
id="com.raytheon.uf.edex.dataprovideragent.feature"
|
||||
version="0.0.0"/>
|
||||
|
||||
<includes
|
||||
id="com.raytheon.uf.edex.remote.script.feature"
|
||||
version="0.0.0"/>
|
||||
|
||||
</feature>
|
||||
|
|
|
@ -9010,4 +9010,15 @@
|
|||
<grib2Value>14</grib2Value>
|
||||
</grib1Parameter>
|
||||
<!-- end ukmetHR -->
|
||||
|
||||
<!-- cmc RELH -->
|
||||
<grib1Parameter>
|
||||
<center>54</center>
|
||||
<grib1TableVersion>2</grib1TableVersion>
|
||||
<grib1Value>52</grib1Value>
|
||||
<grib2discipline>0</grib2discipline>
|
||||
<grib2category>1</grib2category>
|
||||
<grib2Value>1</grib2Value>
|
||||
</grib1Parameter>
|
||||
<!-- end CMC -->
|
||||
</grib1ParameterSet>
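For context (from the standard GRIB tables, not stated in this commit): originating center 54 is the Canadian Meteorological Centre in Montreal, GRIB1 table 2 parameter 52 is relative humidity, and GRIB2 discipline 0, category 1, parameter 1 is also relative humidity, so this entry maps the CMC GRIB1 RH field onto the standard GRIB2 RH parameter.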
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<latLonGridCoverage>
|
||||
<name>ECMWF-wave-0p25deg</name>
|
||||
<description>Global Latitude/Longitude 0.25 deg Resolution</description>
|
||||
<la1>90.0</la1>
|
||||
<lo1>-180.0</lo1>
|
||||
<firstGridPointCorner>UpperLeft</firstGridPointCorner>
|
||||
<nx>1441</nx>
|
||||
<ny>685</ny>
|
||||
<dx>0.25</dx>
|
||||
<dy>0.25</dy>
|
||||
<spacingUnit>degree</spacingUnit>
|
||||
<la2>-81.0</la2>
|
||||
<lo2>180.0</lo2>
|
||||
</latLonGridCoverage>
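As a consistency check on this coverage definition (my arithmetic, not part of the commit): with la1 = 90, la2 = -81 and dy = 0.25 the grid has (90 - (-81)) / 0.25 + 1 = 685 rows, matching <ny>685</ny>, and with lo1 = -180, lo2 = 180 and dx = 0.25 it has 360 / 0.25 + 1 = 1441 columns, matching <nx>1441</nx>.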
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<lambertConformalGridCoverage>
|
||||
<name>14731025001</name>
|
||||
<description>HIRES - 5 km Western US</description>
|
||||
<la1>12.19</la1>
|
||||
<lo1>-133.459</lo1>
|
||||
<firstGridPointCorner>LowerLeft</firstGridPointCorner>
|
||||
<nx>1473</nx>
|
||||
<ny>1025</ny>
|
||||
<dx>5.079</dx>
|
||||
<dy>5.079</dy>
|
||||
<spacingUnit>km</spacingUnit>
|
||||
<minorAxis>6371229.0</minorAxis>
|
||||
<majorAxis>6371229.0</majorAxis>
|
||||
<lov>-95.0</lov>
|
||||
<latin1>25.</latin1>
|
||||
<latin2>25.</latin2>
|
||||
</lambertConformalGridCoverage>
|
|
@ -0,0 +1,36 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<polarStereoGridCoverage>
|
||||
<name>493399001</name>
|
||||
<description>15km HR CMC</description>
|
||||
<la1>32.548</la1>
|
||||
<lo1>-134.62</lo1>
|
||||
<firstGridPointCorner>LowerLeft</firstGridPointCorner>
|
||||
<nx>493</nx>
|
||||
<ny>399</ny>
|
||||
<dx>15</dx>
|
||||
<dy>15</dy>
|
||||
<spacingUnit>km</spacingUnit>
|
||||
<minorAxis>6367470.0</minorAxis>
|
||||
<majorAxis>6367470.0</majorAxis>
|
||||
<lov>-111.0</lov>
|
||||
<lad>60</lad>
|
||||
</polarStereoGridCoverage>
|
|
@ -0,0 +1,36 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<polarStereoGridCoverage>
|
||||
<name>825553001</name>
|
||||
<description>ESTOFS HIRES - 6 km Alaska</description>
|
||||
<la1>40.530101</la1>
|
||||
<lo1>-178.571</lo1>
|
||||
<firstGridPointCorner>LowerLeft</firstGridPointCorner>
|
||||
<nx>825</nx>
|
||||
<ny>553</ny>
|
||||
<dx>5.953125</dx>
|
||||
<dy>5.953125</dy>
|
||||
<spacingUnit>km</spacingUnit>
|
||||
<minorAxis>6371229.0</minorAxis>
|
||||
<majorAxis>6371229.0</majorAxis>
|
||||
<lov>-150.0</lov>
|
||||
<lad>60</lad>
|
||||
</polarStereoGridCoverage>
|
|
@ -0,0 +1,37 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<mercatorGridCoverage>
|
||||
<name>321225001</name>
|
||||
<description>Extratropical Storm and Tide Operation Forecast System (Hawaii)</description>
|
||||
<la1>18.067</la1>
|
||||
<lo1>-161.525</lo1>
|
||||
<firstGridPointCorner>LowerLeft</firstGridPointCorner>
|
||||
<nx>321</nx>
|
||||
<ny>225</ny>
|
||||
<dx>2.5</dx>
|
||||
<dy>2.5</dy>
|
||||
<spacingUnit>km</spacingUnit>
|
||||
<minorAxis>6371229.0</minorAxis>
|
||||
<majorAxis>6371229.0</majorAxis>
|
||||
<latin>20.0</latin>
|
||||
<la2>23.082</la2>
|
||||
<lo2>-153.969</lo2>
|
||||
</mercatorGridCoverage>
|
|
@ -0,0 +1,32 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<latLonGridCoverage>
|
||||
<name>1440721001</name>
|
||||
<description>High Resolution (0.25 by 0.25) CED grids</description>
|
||||
<la1>90.0</la1>
|
||||
<lo1>0.0</lo1>
|
||||
<firstGridPointCorner>UpperLeft</firstGridPointCorner>
|
||||
<nx>1440</nx>
|
||||
<ny>721</ny>
|
||||
<dx>.25</dx>
|
||||
<dy>.25</dy>
|
||||
<spacingUnit>degree</spacingUnit>
|
||||
</latLonGridCoverage>
|
|
@ -0,0 +1,37 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<lambertConformalGridCoverage>
|
||||
<name>17991059001</name>
|
||||
<description>3 km LCC limited domain CONUS grid (used by HRRR)</description>
|
||||
<la1>21.138</la1>
|
||||
<lo1>-122.72</lo1>
|
||||
<firstGridPointCorner>LowerLeft</firstGridPointCorner>
|
||||
<nx>1799</nx>
|
||||
<ny>1059</ny>
|
||||
<dx>3.0</dx>
|
||||
<dy>3.0</dy>
|
||||
<spacingUnit>km</spacingUnit>
|
||||
<minorAxis>6371229.0</minorAxis>
|
||||
<majorAxis>6371229.0</majorAxis>
|
||||
<lov>-97.5</lov>
|
||||
<latin1>38.5</latin1>
|
||||
<latin2>38.5</latin2>
|
||||
</lambertConformalGridCoverage>
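The <name> values in these new coverage files appear to encode the grid dimensions, judging from the values elsewhere in this commit (17991059001 for this 1799 x 1059 HRRR grid, 14731025001 for the 1473 x 1025 HiResW grid, 493399001 for the 493 x 399 CMC grid); this is an observation from the values above, not a documented rule.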
|
|
@ -0,0 +1,34 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!--
|
||||
This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
|
||||
pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
||||
|
||||
U.S._EXPORT_CONTROLLED_TECHNICAL_DATA
|
||||
This_software_product_contains_export-restricted_data_whose
|
||||
export/transfer/disclosure_is_restricted_by_U.S._law._Dissemination
|
||||
to_non-U.S._persons_whether_in_the_United_States_or_abroad_requires
|
||||
an_export_license_or_other_authorization.
|
||||
|
||||
Contractor_Name:________Raytheon_Company
|
||||
Contractor_Address:_____6825_Pine_Street,_Suite_340
|
||||
________________________Mail_Stop_B8
|
||||
________________________Omaha,_NE_68106
|
||||
________________________402.291.0100
|
||||
|
||||
See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
|
||||
further_licensing_information.
|
||||
-->
|
||||
<latLonGridCoverage>
|
||||
<name>193</name>
|
||||
<description>Global Latitude/Longitude 0.25 deg Resolution</description>
|
||||
<la1>90.0</la1>
|
||||
<lo1>0.0</lo1>
|
||||
<firstGridPointCorner>UpperLeft</firstGridPointCorner>
|
||||
<nx>1440</nx>
|
||||
<ny>721</ny>
|
||||
<dx>.25</dx>
|
||||
<dy>.25</dy>
|
||||
<spacingUnit>degree</spacingUnit>
|
||||
<la2>-90.0</la2>
|
||||
<lo2>359.75</lo2>
|
||||
</latLonGridCoverage>
|
|
@ -11,20 +11,6 @@
|
|||
<gribModelSet>
|
||||
|
||||
<!-- SUBCENTER 0 -->
|
||||
<model>
|
||||
<name>ecens</name>
|
||||
<center>98</center>
|
||||
<subcenter>0</subcenter>
|
||||
<grid>360181001</grid>
|
||||
<process>
|
||||
<id>141</id>
|
||||
<id>142</id>
|
||||
<id>143</id>
|
||||
<id>144</id>
|
||||
<id>145</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>ecmwfP25</name>
|
||||
<center>98</center>
|
||||
|
|
|
@ -10,6 +10,26 @@
|
|||
|
||||
<gribModelSet>
|
||||
<!-- SUBCENTER 0 -->
|
||||
<model>
|
||||
<name>estofsHW</name>
|
||||
<center>7</center>
|
||||
<subcenter>4</subcenter>
|
||||
<grid>321225001</grid>
|
||||
<process>
|
||||
<id>14</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>estofsAK</name>
|
||||
<center>7</center>
|
||||
<subcenter>4</subcenter>
|
||||
<grid>825553001</grid>
|
||||
<process>
|
||||
<id>14</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>estofsPR</name>
|
||||
<center>7</center>
|
||||
|
@ -1074,6 +1094,16 @@
|
|||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>hrrr</name>
|
||||
<center>7</center>
|
||||
<subcenter>0</subcenter>
|
||||
<grid>17991059001</grid>
|
||||
<process>
|
||||
<id>83</id>
|
||||
</process>
|
||||
</model>
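The <grid> value here, 17991059001, matches the <name> of the 3 km HRRR lambertConformalGridCoverage added earlier in this commit, which appears to be how the model entry is tied to its grid definition.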
|
||||
|
||||
<model>
|
||||
<name>RTMA</name>
|
||||
<center>7</center>
|
||||
|
@ -1415,6 +1445,26 @@
|
|||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>HiResW-NMM-US</name>
|
||||
<center>7</center>
|
||||
<subcenter>0</subcenter>
|
||||
<grid>14731025001</grid>
|
||||
<process>
|
||||
<id>112</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>HiResW-ARW-US</name>
|
||||
<center>7</center>
|
||||
<subcenter>0</subcenter>
|
||||
<grid>14731025001</grid>
|
||||
<process>
|
||||
<id>116</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>HiResW-NMM-East</name>
|
||||
<center>7</center>
|
||||
|
@ -2430,16 +2480,6 @@
|
|||
|
||||
<!-- END SUBCENTER 2: NCEP ENSEMBLE PRODUCTS -->
|
||||
|
||||
<model>
|
||||
<name>hysplit</name>
|
||||
<center>7</center>
|
||||
<subcenter>3</subcenter>
|
||||
<grid>36928220</grid>
|
||||
<process>
|
||||
<id>3</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<!-- SUBCENTER 4: ENVIRONMENTAL MODELING CENTER -->
|
||||
|
||||
<model>
|
||||
|
|
|
@ -12,6 +12,16 @@
|
|||
|
||||
<!-- SUBCENTER 0 -->
|
||||
|
||||
<model>
|
||||
<name>cmcHR</name>
|
||||
<center>53</center>
|
||||
<subcenter>0</subcenter>
|
||||
<grid>493399001</grid>
|
||||
<process>
|
||||
<id>36</id>
|
||||
</process>
|
||||
</model>
|
||||
|
||||
<model>
|
||||
<name>CanadianModel</name>
|
||||
<center>54</center>
|
||||
|
|
|
@@ -1,4 +1,4 @@
-<?xml version="2.0" encoding="UTF-8" standalone="yes"?>
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <!--
     This_software_was_developed_and_/_or_modified_by_Raytheon_Company,
     pursuant_to_Contract_DG133W-05-CQ-1067_with_the_US_Government.
|
|
|
@ -140,5 +140,4 @@
|
|||
<alias base="SP">SPXX</alias>
|
||||
<alias base="IDRL">IDRL</alias>
|
||||
<alias base="SRCS">SRCS</alias>
|
||||
<alias base="NTAT">NTAT</alias>
|
||||
</aliasList>
|
||||
</aliasList>
|
||||
|
|
|
@ -211,6 +211,7 @@
|
|||
<alias base="MCDC">CLDM</alias>
|
||||
<alias base="MIXR">MIXR</alias>
|
||||
<alias base="MMSP">MMSL</alias>
|
||||
<alias base="MN2T66hr">TKMN06</alias>
|
||||
<alias base="MnT">TMNK</alias>
|
||||
<alias base="MnT1hr">TMNK01</alias>
|
||||
<alias base="MnT3hr">TMNK03</alias>
|
||||
|
@ -220,6 +221,7 @@
|
|||
<alias base="MnT18hr">TMNK18</alias>
|
||||
<alias base="MnT24hr">TMNK24</alias>
|
||||
<alias base="MnT48hr">TMNK48</alias>
|
||||
<alias base="MX2T66hr">TKMX06</alias>
|
||||
<alias base="MxT">TMXK</alias>
|
||||
<alias base="MxT1hr">TMXK01</alias>
|
||||
<alias base="MxT3hr">TMXK03</alias>
|
||||
|
@ -303,6 +305,7 @@
|
|||
<alias base="TCC18hr">TCLD18</alias>
|
||||
<alias base="TCC24hr">TCLD24</alias>
|
||||
<alias base="TCC48hr">TCLD48</alias>
|
||||
<alias base="TCWAT">TCWTR</alias>
|
||||
<alias base="ThP">TSTM</alias>
|
||||
<alias base="TOZNE">TOZO</alias>
|
||||
<alias base="TP">APCP</alias>
|
||||
|
@ -355,6 +358,7 @@
|
|||
<alias base="USWRF48hr">SWRU48</alias>
|
||||
<alias base="uW">UREL</alias>
|
||||
<alias base="vW">VREL</alias>
|
||||
<alias base="VAFTAD1hr">VASH01</alias>
|
||||
<alias base="VAPP">VAPR</alias>
|
||||
<alias base="VFLX">VRELFX</alias>
|
||||
<alias base="VFLX1hr">VRELFX01</alias>
|
||||
|
@ -399,4 +403,4 @@
|
|||
<alias base="WVDIR">DRCTWW</alias>
|
||||
<alias base="WVHGT">HGHTWW</alias>
|
||||
<alias base="WVPER">PERDWW</alias>
|
||||
</aliasList>
|
||||
</aliasList>
|
||||
|
|
7    edexOsgi/com.raytheon.uf.common.remote.script/.classpath    Normal file
|
@ -0,0 +1,7 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
|
||||
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
|
||||
<classpathentry excluding="com/raytheon/uf/common/remote/script/data/" kind="src" path="src"/>
|
||||
<classpathentry kind="output" path="bin"/>
|
||||
</classpath>
|
28    edexOsgi/com.raytheon.uf.common.remote.script/.project    Normal file
|
@ -0,0 +1,28 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>com.raytheon.uf.common.remote.script</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.pde.ManifestBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.pde.SchemaBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.pde.PluginNature</nature>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
|
@ -0,0 +1,11 @@
|
|||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
|
||||
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
|
||||
org.eclipse.jdt.core.compiler.compliance=1.6
|
||||
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
|
||||
org.eclipse.jdt.core.compiler.debug.localVariable=generate
|
||||
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
|
||||
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.source=1.6
|
|
@ -0,0 +1,14 @@
|
|||
Manifest-Version: 1.0
|
||||
Bundle-ManifestVersion: 2
|
||||
Bundle-Name: Common
|
||||
Bundle-SymbolicName: com.raytheon.uf.common.remote.script
|
||||
Bundle-Version: 1.14.0.qualifier
|
||||
Bundle-Vendor: RATHEON
|
||||
Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.localization;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.auth;bundle-version="1.12.1174",
|
||||
org.apache.commons.lang;bundle-version="2.3.0"
|
||||
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
|
||||
Bundle-ActivationPolicy: lazy
|
||||
Export-Package: com.raytheon.uf.common.remote.script
|
|
@ -0,0 +1,4 @@
|
|||
source.. = src/
|
||||
output.. = bin/
|
||||
bin.includes = META-INF/,\
|
||||
.
|
|
@ -0,0 +1,114 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.remote.script;
|
||||
|
||||
/**
|
||||
* Useful constants for remote scripts.
|
||||
* <p>
|
||||
* The resource defaults noted here are the ones defined in this class and are
|
||||
* used when a property value is not defined. These values can be overridden
|
||||
* for the EDEX server by defining a property value in the EDEX resources file:
|
||||
* com.raytheon.uf.edex.remote.script.properties.
|
||||
* <p>
|
||||
* Except where noted a given run request may override a value. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*
|
||||
* <pre>
|
||||
* Resource properties:
|
||||
*
|
||||
* KEY VALUES DESCRIPTION
|
||||
* ======================================================
|
||||
* remote.script.directory Localized directories to search for scripts. (default
|
||||
* remoteScripts). Cannot be overridden in a request.
|
||||
* remote.script.timeout Kill script process if it doesn't finish by this
|
||||
* number of seconds (default 30).
|
||||
* remote.script.use.stderr When true separate standard out and standard error
|
||||
* (default false).
|
||||
* remote.script.setup.error Exit error to use when unable to run a script
|
||||
* (default 99).
|
||||
* </pre>
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 12, 2014 #2742 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class RemoteScriptConstants {
|
||||
/**
|
||||
* Resource key to obtain script directory. Cannot be overridden in script
|
||||
* properties. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*/
|
||||
public static final String scriptDirectoryKey = "remote.script.directory";
|
||||
|
||||
/**
|
||||
* Resource/Property key to obtain execution timeout in seconds. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*/
|
||||
public static final String scriptTimeoutKey = "remote.script.timeout";
|
||||
|
||||
/**
|
||||
* Resource/Property key to obtain boolean to separate standard error from
|
||||
* the standard out stream. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*/
|
||||
public static final String scriptUseStdErrKey = "remote.script.use.stderr";
|
||||
|
||||
/**
|
||||
* Resource/Property key to obtain exit value to use when unable to run the
|
||||
* script.
|
||||
*/
|
||||
public static final String scriptSetupErrrorKey = "remote.script.setup.error";
|
||||
|
||||
/**
|
||||
* Default common static directory for remote scripts. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*/
|
||||
public static final String scriptDirectoryDefault = "remoteScripts";
|
||||
|
||||
/**
|
||||
* Default time out value in seconds. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*/
|
||||
public static final String scriptTimeoutDefault = "30";
|
||||
|
||||
/**
|
||||
* Default flag to separate standard error from the standard out stream. See
|
||||
* {@link RemoteScriptRunRequest#putProperty(String, String)}.
|
||||
*/
|
||||
public static final String scriptUseStdErrDefault = "false";
|
||||
|
||||
/**
|
||||
* Error exit value to use when unable to run the script.
|
||||
*/
|
||||
public static final String scriptSetUpErrorDefault = "99";
|
||||
|
||||
private RemoteScriptConstants() {
|
||||
}
|
||||
}
|
|
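The constants above are easiest to see in use alongside a run request. A minimal sketch, assuming the example class name and the caller-supplied user, script, and context are illustrative placeholders rather than anything in this changeset:

import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.remote.script.RemoteScriptConstants;
import com.raytheon.uf.common.remote.script.RemoteScriptRunRequest;

public class RemoteScriptRequestExample {
    /**
     * Build a run request that overrides the EDEX defaults for this one call.
     * The user id, script name and context are supplied by the caller.
     */
    public static RemoteScriptRunRequest buildRequest(String userId,
            String script, LocalizationContext context) {
        RemoteScriptRunRequest req = new RemoteScriptRunRequest(userId,
                script, context);
        // Give the script 60 seconds instead of the 30 second default.
        req.putProperty(RemoteScriptConstants.scriptTimeoutKey, "60");
        // Keep stderr separate from stdout in the response.
        req.putProperty(RemoteScriptConstants.scriptUseStdErrKey, "true");
        return req;
    }
}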
@ -0,0 +1,106 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.remote.script;
|
||||
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* This class is a request to obtain a listing of remote scripts from the desired
|
||||
* localization directories.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 13, 2014 2742 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
@DynamicSerialize
|
||||
public class RemoteScriptListRequest extends RemoteScriptRequest {
|
||||
|
||||
/**
|
||||
* The contexts to search for scripts.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private LocalizationContext[] contexts;
|
||||
|
||||
/**
|
||||
* Default constructor.
|
||||
*/
|
||||
public RemoteScriptListRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param userId
|
||||
*/
|
||||
public RemoteScriptListRequest(String userId, LocalizationContext[] contexts) {
|
||||
this();
|
||||
setUserId(userId);
|
||||
setContexts(contexts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Getter.
|
||||
*
|
||||
* @return contexts
|
||||
*/
|
||||
public LocalizationContext[] getContexts() {
|
||||
return contexts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter.
|
||||
*
|
||||
* @param contexts
|
||||
*            - when null, contexts is cleared.
|
||||
*/
|
||||
public void setContexts(LocalizationContext[] contexts) {
|
||||
if (contexts != null) {
|
||||
this.contexts = contexts;
|
||||
} else {
|
||||
this.contexts = new LocalizationContext[0];
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder("RemoteScriptListRequest {");
|
||||
sb.append("userId: ").append(getUserId());
|
||||
sb.append(", contexts: ").append(getContexts());
|
||||
sb.append("}");
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
|
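A minimal sketch of building this list request, assuming the wrapper class name is a placeholder and the contexts to search are supplied by the caller:

import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.remote.script.RemoteScriptListRequest;

public class ListRequestExample {
    /** Ask for the scripts visible in the caller-supplied contexts. */
    public static RemoteScriptListRequest forUser(String userId,
            LocalizationContext... contexts) {
        return new RemoteScriptListRequest(userId, contexts);
    }
}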
@ -0,0 +1,163 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.remote.script;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* This contains the Localization Context map that is the result of a remote script
|
||||
* list request. The keys are a sorted list of the script file names.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 11, 2014 2742 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
@DynamicSerialize
|
||||
public class RemoteScriptListResponse {
|
||||
/**
|
||||
* Map of sorted script names to the contexts in which each script was found.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private Map<String, List<LocalizationContext>> scripts = new TreeMap<String, List<LocalizationContext>>(
|
||||
String.CASE_INSENSITIVE_ORDER);
|
||||
|
||||
/**
|
||||
* Default Constructor.
|
||||
*/
|
||||
public RemoteScriptListResponse() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param scripts
|
||||
*/
|
||||
public RemoteScriptListResponse(
|
||||
Map<String, List<LocalizationContext>> scripts) {
|
||||
setScripts(scripts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Getter for the map of scripts' context lists; the script keys are sorted.
|
||||
*
|
||||
* @return scripts
|
||||
*/
|
||||
public Map<String, List<LocalizationContext>> getScripts() {
|
||||
return scripts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter.
|
||||
*
|
||||
* @param scripts
|
||||
* - when null scripts are cleared.
|
||||
*/
|
||||
public void setScripts(Map<String, List<LocalizationContext>> scripts) {
|
||||
this.scripts.clear();
|
||||
if (scripts != null) {
|
||||
this.scripts.putAll(scripts);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience method to add a file's script name and context to scripts.
|
||||
*
|
||||
* @param lFile
|
||||
* @return true if not already in the set
|
||||
*/
|
||||
public boolean add(LocalizationFile lFile) {
|
||||
String script = getName(lFile);
|
||||
LocalizationContext context = lFile.getContext();
|
||||
|
||||
List<LocalizationContext> contexts = scripts.get(script);
|
||||
if (contexts == null) {
|
||||
contexts = new ArrayList<LocalizationContext>();
|
||||
scripts.put(script, contexts);
|
||||
}
|
||||
|
||||
if (!contexts.contains(context)) {
|
||||
contexts.add(context);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience method to remove the file's context from scripts.
|
||||
*
|
||||
* @return true - if the file's script and context were in scripts.
|
||||
*/
|
||||
public boolean remove(LocalizationFile lFile) {
|
||||
String name = getName(lFile);
|
||||
List<LocalizationContext> contexts = scripts.get(name);
|
||||
if (contexts == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (contexts.remove(lFile.getContext())) {
|
||||
if (contexts.size() == 0) {
|
||||
scripts.remove(name);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the script name of the localized file.
|
||||
*
|
||||
* @param lFile
|
||||
* @return name
|
||||
*/
|
||||
private String getName(LocalizationFile lFile) {
|
||||
String name = lFile.getName().trim();
|
||||
return name.substring(name.lastIndexOf(IPathManager.SEPARATOR) + 1);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
public String toString() {
|
||||
return String.format("RemoteScriptListResponse: {scripts: %s}",
|
||||
getScripts());
|
||||
}
|
||||
}
|
|
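A minimal sketch of walking the response map, assuming the example class name is a placeholder:

import java.util.List;
import java.util.Map;

import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.remote.script.RemoteScriptListResponse;

public class ListResponseExample {
    /** Print each script name with the contexts it was found in. */
    public static void dump(RemoteScriptListResponse response) {
        for (Map.Entry<String, List<LocalizationContext>> entry : response
                .getScripts().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}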
@ -0,0 +1,88 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.remote.script;
|
||||
|
||||
import com.raytheon.uf.common.auth.req.AbstractPrivilegedRequest;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* Class with common elements for the remote script requests.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 11, 2014 2742 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
@DynamicSerialize
|
||||
public class RemoteScriptRequest extends AbstractPrivilegedRequest {
|
||||
|
||||
/** User making the request. */
|
||||
@DynamicSerializeElement
|
||||
private String userId;
|
||||
|
||||
/** not authorized message. */
|
||||
@DynamicSerializeElement
|
||||
private String notAuthorizedMessage = "Not Authorized";
|
||||
|
||||
/**
|
||||
* Getter.
|
||||
*
|
||||
* @return userId
|
||||
*/
|
||||
public String getUserId() {
|
||||
return userId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter.
|
||||
*
|
||||
* @param userId
|
||||
*/
|
||||
public void setUserId(String userId) {
|
||||
this.userId = userId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Getter.
|
||||
*
|
||||
* @return notAuthorizedMessage
|
||||
*/
|
||||
public String getNotAuthorizedMessage() {
|
||||
return notAuthorizedMessage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter.
|
||||
*
|
||||
* @param notAuthorizedMessage
|
||||
*/
|
||||
public void setNotAuthorizedMessage(String notAuthorizedMessage) {
|
||||
this.notAuthorizedMessage = notAuthorizedMessage;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,263 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.remote.script;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.lang.text.StrMatcher;
|
||||
import org.apache.commons.lang.text.StrTokenizer;
|
||||
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* This class is a request to run a remote script from the desired localization
|
||||
* directory.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 19, 2014 2743 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
@DynamicSerialize
|
||||
public class RemoteScriptRunRequest extends RemoteScriptRequest {
|
||||
@DynamicSerializeElement
|
||||
String script;
|
||||
|
||||
@DynamicSerializeElement
|
||||
LocalizationContext context;
|
||||
|
||||
/**
|
||||
* Mapping of resource properties to override. See
|
||||
* {@link RemoteScriptConstants}.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private Map<String, String> propertyMap = new HashMap<String, String>();
|
||||
|
||||
/**
|
||||
* Command line arguments for the remote script.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private List<String> scriptArguments = new ArrayList<String>();
|
||||
|
||||
/**
|
||||
* Default constructor.
|
||||
*/
|
||||
public RemoteScriptRunRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param userId
|
||||
* - user to run the script
|
||||
* @param script
|
||||
* - script's filename located in the script directory. See
|
||||
* {@link RemoteScriptConstants}.
|
||||
* @param context
|
||||
* - The localize directory's context that contains the script.
|
||||
* See {@link RemoteScriptListResponse#get(String)}
|
||||
*/
|
||||
public RemoteScriptRunRequest(String userId, String script,
|
||||
LocalizationContext context) {
|
||||
this();
|
||||
setUserId(userId);
|
||||
setScript(script);
|
||||
setContext(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the context associated with the script's filename. See
|
||||
* {@link RemoteScriptListResponse#get(String)}.
|
||||
*
|
||||
* @return context
|
||||
*/
|
||||
public LocalizationContext getContext() {
|
||||
return context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the context associated with the script's filename. See
|
||||
* {@link RemoteScriptListResponse#get(String)}.
|
||||
*
|
||||
* @param context
|
||||
*/
|
||||
public void setContext(LocalizationContext context) {
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Getter for script property map. Recommend this only be used for
|
||||
* serialization, see {@link RemoteScriptConstants}.
|
||||
*
|
||||
* @return propertyMap
|
||||
*/
|
||||
public Map<String, String> getPropertyMap() {
|
||||
return propertyMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter for script property map. This is used to override the script
|
||||
* property values for handling the script. Recommend this only be used for
|
||||
* serialization, see {@link RemoteScriptConstants}.
|
||||
*
|
||||
* @param propertyMap
|
||||
*            - when null, the map is cleared so default values will be used
|
||||
*/
|
||||
public void setPropertyMap(Map<String, String> propertyMap) {
|
||||
this.propertyMap.clear();
|
||||
if (propertyMap != null) {
|
||||
this.propertyMap.putAll(propertyMap);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Override the default resource property, see {@link RemoteScriptConstants}
|
||||
* .
|
||||
*
|
||||
* @param key
|
||||
* @param value
|
||||
* @return oldValue - previous value for key
|
||||
*/
|
||||
public String putProperty(String key, String value) {
|
||||
return propertyMap.put(key, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove property.
|
||||
*
|
||||
* @param key
|
||||
* - property to remove
|
||||
* @return value - Value removed or null if none
|
||||
*/
|
||||
public String removeProperty(String key) {
|
||||
return propertyMap.remove(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of command line arguments for the script in the order sent to
|
||||
* the process builder. See {@link java.lang.ProcessBuilder#command(List)}.
|
||||
* Note the full path name to the script to run will be added by the handler
|
||||
* when sending the request.
|
||||
*
|
||||
* @return scriptArguments
|
||||
*/
|
||||
public List<String> getScriptArguments() {
|
||||
return scriptArguments;
|
||||
}
|
||||
|
||||
public String getScript() {
|
||||
return script;
|
||||
}
|
||||
|
||||
public void setScript(String script) {
|
||||
this.script = script;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of command line arguments for the script in the order they
|
||||
* are to be sent to the process builder. See
|
||||
* {@link java.lang.ProcessBuilder#command(List)}. Note the full path name
|
||||
* to the script to run will be added by the handler when sending the
|
||||
* request.
|
||||
*
|
||||
* @param scriptArguments
|
||||
*/
|
||||
public void setScriptArguments(List<String> scriptArguments) {
|
||||
this.scriptArguments.clear();
|
||||
if (scriptArguments != null) {
|
||||
this.scriptArguments.addAll(scriptArguments);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Append an argument to the script's argument list.
|
||||
*
|
||||
* @param argument
|
||||
* @return success - true when added to the list
|
||||
*/
|
||||
public boolean addScriptArgument(String argument) {
|
||||
return scriptArguments.add(argument);
|
||||
}
|
||||
|
||||
/**
|
||||
* This clears the script argument list and breaks arguments into a list
|
||||
* which becomes the new script argument list. The arguments string is split into
|
||||
* tokens delimited by whitespace. A token may be surrounded by single or
|
||||
* double quotes. A quote may be escaped within a quoted section by
|
||||
* duplicating itself.
|
||||
*
|
||||
* <pre>
|
||||
* Examples of lists from java strings:
|
||||
* "a b c" - Three arguments [a, b, c]
|
||||
* "1 'a b c'd 3" - Three arguments [1, a b cd, 3]
|
||||
* "1 \"a b c\"d 3" - Three arguments [1, a b cd, 3]
|
||||
* "1 'a b ''c'''d 3" - Three arguments [1, a b 'c'd, 3]
|
||||
* "1 \"a b \"\"c\"\"\"d 3" - Three arguments [1, a b "c"d, 3]
|
||||
* </pre>
|
||||
*/
|
||||
public void parseAndSetScriptArguments(String arguments) {
|
||||
if (arguments == null || (arguments.trim().length() == 0)) {
|
||||
clearScriptArguements();
|
||||
return;
|
||||
}
|
||||
String[] args = (new StrTokenizer(arguments, StrMatcher.spaceMatcher(),
|
||||
StrMatcher.quoteMatcher())).getTokenArray();
|
||||
setScriptArguments(Arrays.asList(args));
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all arguments from the script's argument list.
|
||||
*/
|
||||
public void clearScriptArguements() {
|
||||
scriptArguments.clear();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder(getClass().getName());
|
||||
sb.append("[ userId: ").append(getUserId());
|
||||
sb.append(", script: ").append(getScript());
|
||||
sb.append(", context: ").append(getContext());
|
||||
sb.append(", propertyMap: ").append(propertyMap);
|
||||
sb.append(", scriptArguments: ").append(getScriptArguments());
|
||||
sb.append("]");
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
|
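A minimal sketch of the argument-parsing behavior described in parseAndSetScriptArguments, assuming the class name and the caller-supplied user, script, and context are placeholders:

import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.remote.script.RemoteScriptRunRequest;

public class RunRequestArgsExample {
    /** Demonstrate how a quoted command line is tokenized into arguments. */
    public static RemoteScriptRunRequest withArgs(String userId, String script,
            LocalizationContext context) {
        RemoteScriptRunRequest req = new RemoteScriptRunRequest(userId,
                script, context);
        // Becomes three arguments: [-v, output file.txt, 3]
        req.parseAndSetScriptArguments("-v 'output file.txt' 3");
        return req;
    }
}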
@ -0,0 +1,172 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.common.remote.script;
|
||||
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* This contains the results from running a remote script.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 19, 2014 2743 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
@DynamicSerialize
|
||||
public class RemoteScriptRunResponse {
|
||||
/**
|
||||
* The exit status for the script.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private Integer exitStatus;
|
||||
|
||||
/**
|
||||
* The script's output: either just stdout or a combination of stdout and
|
||||
* stderr. See {@link RemoteScriptConstants}.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private String output = "";
|
||||
|
||||
/**
|
||||
* When requested, a separate string with stderr. See
|
||||
* {@link RemoteScriptConstants}.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private String error = "";
|
||||
|
||||
/**
|
||||
* When true script process timed out and was killed otherwise false. See
|
||||
* {@link RemoteScriptConstants}.
|
||||
*/
|
||||
@DynamicSerializeElement
|
||||
private boolean timedOut = false;
|
||||
|
||||
/**
|
||||
* Default Constructor.
|
||||
*/
|
||||
public RemoteScriptRunResponse() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the exit status for the process running the script.
|
||||
*
|
||||
* @return exitStatus
|
||||
*/
|
||||
public Integer getExitStatus() {
|
||||
return exitStatus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the exit status for script's process. Should only be used by the
|
||||
* handler and serialization.
|
||||
*
|
||||
* @param exitStatus
|
||||
*/
|
||||
public void setExitStatus(Integer exitStatus) {
|
||||
this.exitStatus = exitStatus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the script's output. Based on the request properties this is either
|
||||
* just stdout or a combination of stdout and stderr. See
|
||||
* {@link RemoteScriptConstants}.
|
||||
*
|
||||
* @return output - never null
|
||||
*/
|
||||
public String getOutput() {
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the script's output. Based on the request properties this is either
|
||||
* just stdout or a combination of stdout and stderr. Should only be used by
|
||||
* the handler and serialization. See {@link RemoteScriptConstants}.
|
||||
*
|
||||
* @param output
|
||||
*/
|
||||
public void setOutput(String output) {
|
||||
this.output = (output == null) ? "" : output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get script's stderr when not placed in output.
|
||||
*
|
||||
* @return error - never null
|
||||
*/
|
||||
public String getError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set script's stderr when not placed in output. Should only be used by the
|
||||
* handler and serialization.
|
||||
*
|
||||
* @param error
|
||||
*/
|
||||
public void setError(String error) {
|
||||
this.error = (error == null) ? "" : error;
|
||||
}
|
||||
|
||||
/**
|
||||
* Flag to indicate script did not finish in the desired number of seconds.
|
||||
* See {@link RemoteScriptConstants}.
|
||||
*
|
||||
* @return true when script process is killed otherwise false
|
||||
*/
|
||||
public boolean isTimedOut() {
|
||||
return timedOut;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the timed out flag. Should only be used by the handler and
|
||||
* serialization. See {@link RemoteScriptConstants}.
|
||||
*
|
||||
* @param timedOut
|
||||
*/
|
||||
public void setTimedOut(boolean timedOut) {
|
||||
this.timedOut = timedOut;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder(this.getClass().getName());
|
||||
sb.append("[");
|
||||
sb.append("exitStatus: ").append(getExitStatus());
|
||||
sb.append(", timedOut: ").append(isTimedOut());
|
||||
sb.append(", error: \"").append(getError()).append("\"");
|
||||
sb.append(", output: \"").append(getOutput()).append("\"");
|
||||
sb.append("]");
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
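A minimal sketch of consuming this response, assuming the example class name is a placeholder:

import com.raytheon.uf.common.remote.script.RemoteScriptRunResponse;

public class RunResponseExample {
    /** Summarize a run response, flagging timeouts and non-zero exits. */
    public static String summarize(RemoteScriptRunResponse response) {
        if (response.isTimedOut()) {
            return "Script was killed after exceeding its timeout.";
        }
        Integer exit = response.getExitStatus();
        if (exit != null && exit.intValue() != 0) {
            return "Script failed with exit status " + exit + ": "
                    + response.getError();
        }
        return response.getOutput();
    }
}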
17
edexOsgi/com.raytheon.uf.edex.remote.script.feature/.project
Normal file
17
edexOsgi/com.raytheon.uf.edex.remote.script.feature/.project
Normal file
|
@ -0,0 +1,17 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>com.raytheon.uf.edex.remote.script.feature</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.pde.FeatureBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.pde.FeatureNature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
|
@ -0,0 +1 @@
|
|||
bin.includes = feature.xml
|
|
@ -0,0 +1,34 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<feature
|
||||
id="com.raytheon.uf.edex.remote.script.feature"
|
||||
label="Remote Script Feature"
|
||||
version="1.0.0.qualifier"
|
||||
provider-name="RAYTHEON">
|
||||
|
||||
<description url="http://www.example.com/description">
|
||||
[Enter Feature Description here.]
|
||||
</description>
|
||||
|
||||
<copyright url="http://www.example.com/copyright">
|
||||
[Enter Copyright Description here.]
|
||||
</copyright>
|
||||
|
||||
<license url="http://www.example.com/license">
|
||||
[Enter License Description here.]
|
||||
</license>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.edex.remote.script"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
<plugin
|
||||
id="com.raytheon.uf.common.remote.script"
|
||||
download-size="0"
|
||||
install-size="0"
|
||||
version="0.0.0"
|
||||
unpack="false"/>
|
||||
|
||||
</feature>
|
7
edexOsgi/com.raytheon.uf.edex.remote.script/.classpath
Normal file
7
edexOsgi/com.raytheon.uf.edex.remote.script/.classpath
Normal file
|
@ -0,0 +1,7 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
|
||||
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
|
||||
<classpathentry kind="src" path="src"/>
|
||||
<classpathentry kind="output" path="bin"/>
|
||||
</classpath>
|
28
edexOsgi/com.raytheon.uf.edex.remote.script/.project
Normal file
28
edexOsgi/com.raytheon.uf.edex.remote.script/.project
Normal file
|
@ -0,0 +1,28 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>com.raytheon.uf.edex.remote.script</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.pde.ManifestBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.pde.SchemaBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.pde.PluginNature</nature>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
|
@ -0,0 +1,11 @@
|
|||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
|
||||
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
|
||||
org.eclipse.jdt.core.compiler.compliance=1.6
|
||||
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
|
||||
org.eclipse.jdt.core.compiler.debug.localVariable=generate
|
||||
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
|
||||
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
|
||||
org.eclipse.jdt.core.compiler.source=1.6
|
|
@ -0,0 +1,18 @@
|
|||
Manifest-Version: 1.0
|
||||
Bundle-ManifestVersion: 2
|
||||
Bundle-Name: RemoteScript
|
||||
Bundle-SymbolicName: com.raytheon.uf.edex.remote.script
|
||||
Bundle-Version: 1.14.0.qualifier
|
||||
Bundle-Vendor: RAYTHEON
|
||||
Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.localization;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.remote.script;bundle-version="1.0.0",
|
||||
com.raytheon.uf.common.status;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.edex.auth;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.auth;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.time;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.util;bundle-version="1.12.1174",
|
||||
com.raytheon.uf.common.comm;bundle-version="1.12.1174"
|
||||
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
|
||||
Export-Package: com.raytheon.uf.edex.remote.script
|
|
@ -0,0 +1,5 @@
|
|||
source.. = src/
|
||||
output.. = bin/
|
||||
bin.includes = META-INF/,\
|
||||
.,\
|
||||
res/
|
|
@ -0,0 +1,25 @@
|
|||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
|
||||
|
||||
<!-- Instantiate the handler class for RemoteScriptList Handler -->
|
||||
<bean id="RemoteScriptListHandler"
|
||||
class="com.raytheon.uf.edex.remote.script.RemoteScriptListHandler"/>
|
||||
|
||||
<!-- Register the handler class with the RemoteScriptListRequest Register. -->
|
||||
<bean id="remoteScriptListHandlerRegistered" factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.remote.script.RemoteScriptListRequest"/>
|
||||
<constructor-arg ref="RemoteScriptListHandler"/>
|
||||
</bean>
|
||||
|
||||
<!-- Instantiate the handler class for RemoteScriptRun Handler -->
|
||||
<bean id="RemoteScriptRunHandler"
|
||||
class="com.raytheon.uf.edex.remote.script.RemoteScriptRunHandler"/>
|
||||
|
||||
<!-- Register the handler class with the RemoteScriptRunRequest Register. -->
|
||||
<bean id="remoteScriptRunHandlerRegistered" factory-bean="handlerRegistry" factory-method="register">
|
||||
<constructor-arg value="com.raytheon.uf.common.remote.script.RemoteScriptRunRequest"/>
|
||||
<constructor-arg ref="RemoteScriptRunHandler"/>
|
||||
</bean>
|
||||
</beans>
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
# The values shown here are the default values in the RemoteScriptConstants class.
|
||||
# To change a property's default value for the EDEX server do the following:
|
||||
# 1 - Edit this file and uncomment the desired property line(s).
|
||||
# (remove the # at the start of the line.)
|
||||
# 2 - Change the property's value
|
||||
# 3 - Save the Changes
|
||||
# 4 - Restart EDEX request.
|
||||
|
||||
# The localized directory that contains scripts.
|
||||
remote.script.directory=ncep/remoteScripts
|
||||
|
||||
# Number of seconds to wait for script's process to complete before handler kills the process.
|
||||
# Note the value will be adjusted to be less than the HttpClient's socket timeout value.
|
||||
#remote.script.timeout=30
|
||||
|
||||
# Default use of standard error. When true, separate the standard output and error streams;
|
||||
# otherwise combine them into standard out.
|
||||
#remote.script.use.stderr=false
|
||||
|
||||
# Exit error to use when unable to run a script. Must be an integer.
|
||||
#remote.script.setup.error=99
|
|
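The precedence implied by this properties file (request property, then EDEX resource, then built-in default) can be restated as a small sketch; the class name is a placeholder, and this mirrors, rather than replaces, the handler logic below:

import java.util.Map;

import com.raytheon.uf.common.remote.script.RemoteScriptConstants;

public class TimeoutLookupExample {
    /** Resolve the timeout: request override, else EDEX resource, else built-in default. */
    public static int resolveTimeoutSeconds(Map<String, String> requestProps) {
        String value = requestProps.get(RemoteScriptConstants.scriptTimeoutKey);
        if (value == null) {
            value = System.getProperty(RemoteScriptConstants.scriptTimeoutKey,
                    RemoteScriptConstants.scriptTimeoutDefault);
        }
        try {
            int seconds = Integer.parseInt(value);
            return seconds > 0 ? seconds
                    : Integer.parseInt(RemoteScriptConstants.scriptTimeoutDefault);
        } catch (NumberFormatException ex) {
            // Fall back to the built-in 30 second default on a bad value.
            return Integer.parseInt(RemoteScriptConstants.scriptTimeoutDefault);
        }
    }
}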
@ -0,0 +1,174 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.edex.remote.script;
|
||||
|
||||
import com.raytheon.uf.common.auth.exception.AuthorizationException;
|
||||
import com.raytheon.uf.common.auth.user.IUser;
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptConstants;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptRequest;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.util.ITimer;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.common.util.FileUtil;
|
||||
import com.raytheon.uf.edex.auth.AuthManager;
|
||||
import com.raytheon.uf.edex.auth.AuthManagerFactory;
|
||||
import com.raytheon.uf.edex.auth.req.AbstractPrivilegedRequestHandler;
|
||||
import com.raytheon.uf.edex.auth.resp.AuthorizationResponse;
|
||||
import com.raytheon.uf.edex.auth.roles.IRoleStorage;
|
||||
|
||||
/**
|
||||
* Abstract class for the remote script handlers. Performs authorization and
|
||||
* timing of requests.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 12, 2014 2742 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public abstract class AbstractRemoteScriptHandler extends
|
||||
AbstractPrivilegedRequestHandler<RemoteScriptRequest> {
|
||||
|
||||
/** Status handler of the handling class using this class. */
|
||||
protected final transient IUFStatusHandler statusHandler;
|
||||
|
||||
/** Common static directory for scripts. */
|
||||
protected final String scriptsDirectory;
|
||||
|
||||
/** The handler's roleId defined in the common remoteScriptAdminRoles.xml */
|
||||
protected final String roleId;
|
||||
|
||||
/**
|
||||
* Application name. This must match the application tag in the user role
|
||||
* file.
|
||||
*/
|
||||
private static final String APPLICATION = "Remote Script";
|
||||
|
||||
/**
|
||||
* Construct.
|
||||
*
|
||||
* @param roleId
|
||||
*/
|
||||
public AbstractRemoteScriptHandler(String roleId) {
|
||||
this.statusHandler = UFStatus.getHandler(this.getClass());
|
||||
this.roleId = roleId;
|
||||
|
||||
String scriptsDirectory = FileUtil.edexPath(System.getProperty(
|
||||
RemoteScriptConstants.scriptDirectoryKey,
|
||||
RemoteScriptConstants.scriptDirectoryDefault));
|
||||
|
||||
// Strip trailing separators.
|
||||
if (scriptsDirectory.endsWith(IPathManager.SEPARATOR)) {
|
||||
StringBuilder sb = new StringBuilder(scriptsDirectory);
|
||||
do {
|
||||
sb.setLength(sb.length() - 1);
|
||||
} while ((sb.length() > 0)
|
||||
&& (sb.lastIndexOf(IPathManager.SEPARATOR) == (sb.length() - 1)));
|
||||
scriptsDirectory = sb.toString();
|
||||
}
|
||||
this.scriptsDirectory = scriptsDirectory;
|
||||
}
|
||||
|
||||
/**
|
||||
* The method a subclass must implement to perform the work for the desired
|
||||
* request.
|
||||
*
|
||||
* @param request
|
||||
* @return results
|
||||
*/
|
||||
abstract protected Object performRequest(RemoteScriptRequest request);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see
|
||||
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
|
||||
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
|
||||
*/
|
||||
@Override
|
||||
public Object handleRequest(RemoteScriptRequest request) throws Exception {
|
||||
Object result = null;
|
||||
|
||||
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
|
||||
statusHandler.handle(Priority.INFO, String.format(
|
||||
"Start for %s, do %s", request.getUserId(), getRoleId()));
|
||||
}
|
||||
|
||||
ITimer timer = TimeUtil.getTimer();
|
||||
timer.start();
|
||||
result = performRequest(request);
|
||||
timer.stop();
|
||||
|
||||
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
|
||||
statusHandler.handle(
|
||||
Priority.INFO,
|
||||
String.format("Finish for %s, do %s, took %s",
|
||||
request.getUserId(), getRoleId(),
|
||||
TimeUtil.prettyDuration(timer.getElapsedTime())));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected String getRoleId() {
|
||||
return roleId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the authorization work for the handlers.
|
||||
*
|
||||
* @param user
|
||||
* @param request
|
||||
* @return authorizationResponse
|
||||
* @throws AuthorizationException
|
||||
*/
|
||||
public AuthorizationResponse authorized(IUser user,
|
||||
RemoteScriptRequest request) throws AuthorizationException {
|
||||
AuthManager manager = AuthManagerFactory.getInstance().getManager();
|
||||
IRoleStorage roleStorage = manager.getRoleStorage();
|
||||
|
||||
String roleId = getRoleId();
|
||||
|
||||
boolean authorized = roleStorage.isAuthorized(roleId, user.uniqueId()
|
||||
.toString(), APPLICATION);
|
||||
|
||||
if (authorized) {
|
||||
return new AuthorizationResponse(authorized);
|
||||
} else {
|
||||
String message = "Not Authorized to run " + roleId;
|
||||
if (statusHandler.isPriorityEnabled(Priority.INFO)) {
|
||||
statusHandler.handle(Priority.INFO,
|
||||
String.format("%s, %s", user.uniqueId(), message));
|
||||
}
|
||||
return new AuthorizationResponse(message);
|
||||
}
|
||||
}
|
||||
}
|
|
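A minimal sketch of a concrete subclass, assuming the class name and the "remote.script.echo" role id are hypothetical and would need to be declared in remoteScriptAdminRoles.xml:

import com.raytheon.uf.common.remote.script.RemoteScriptRequest;
import com.raytheon.uf.edex.remote.script.AbstractRemoteScriptHandler;

public class EchoRemoteScriptHandler extends AbstractRemoteScriptHandler {
    /** Use a role id declared in the common remoteScriptAdminRoles.xml. */
    public EchoRemoteScriptHandler() {
        super("remote.script.echo");
    }

    @Override
    protected Object performRequest(RemoteScriptRequest request) {
        // Authorization and timing are already handled by the base class.
        return "Handled request for " + request.getUserId();
    }
}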
@ -0,0 +1,103 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.edex.remote.script;
|
||||
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptListRequest;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptListResponse;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptRequest;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
|
||||
/**
|
||||
* Handler to get the remote script list.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 14, 2014 2742 rferrel Initial creation
|
||||
* Exclude files with md5 checksum extension.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class RemoteScriptListHandler extends AbstractRemoteScriptHandler {
|
||||
|
||||
/** Extension for check sum files to remove from listing. */
|
||||
private final String MD5_EXT = ".md5";
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*/
|
||||
public RemoteScriptListHandler() {
|
||||
// The role id in the common remoteScriptAdminRoles.xml
|
||||
super("remote.script.list");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see
|
||||
* com.raytheon.uf.edex.remote.script.RemoteScriptHandler#performRequest
|
||||
* (com.raytheon.uf.common.remote.script.RemoteScriptRequest)
|
||||
*/
|
||||
public Object performRequest(RemoteScriptRequest request) {
|
||||
IPathManager pm = PathManagerFactory.getPathManager();
|
||||
RemoteScriptListRequest req = (RemoteScriptListRequest) request;
|
||||
|
||||
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
|
||||
statusHandler.handle(Priority.DEBUG,
|
||||
String.format("Request: %s", req));
|
||||
}
|
||||
|
||||
LocalizationContext[] ctxs = req.getContexts();
|
||||
|
||||
RemoteScriptListResponse result = new RemoteScriptListResponse();
|
||||
|
||||
for (LocalizationContext ctx : ctxs) {
|
||||
LocalizationFile[] lFiles = pm.listFiles(ctx, scriptsDirectory,
|
||||
null, false, true);
|
||||
if ((lFiles != null) && (lFiles.length > 0)) {
|
||||
for (LocalizationFile lFile : lFiles) {
|
||||
if (!lFile.getName().trim().endsWith(MD5_EXT)) {
|
||||
result.add(lFile);
|
||||
System.out.println(lFile.getFile().getAbsolutePath());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
|
||||
statusHandler.handle(Priority.DEBUG,
|
||||
String.format("Results: %s", result));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,303 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.uf.edex.remote.script;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.raytheon.uf.common.comm.HttpClient;
|
||||
import com.raytheon.uf.common.localization.IPathManager;
|
||||
import com.raytheon.uf.common.localization.LocalizationContext;
|
||||
import com.raytheon.uf.common.localization.LocalizationFile;
|
||||
import com.raytheon.uf.common.localization.PathManagerFactory;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptConstants;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptRequest;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptRunRequest;
|
||||
import com.raytheon.uf.common.remote.script.RemoteScriptRunResponse;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.common.util.RunProcess;
|
||||
|
||||
/**
|
||||
* Handler to Run a remote script and return the results.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 19, 2014 2743 rferrel Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author rferrel
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class RemoteScriptRunHandler extends AbstractRemoteScriptHandler {
|
||||
|
||||
/**
|
||||
* Time to back off from the socket timeout to allow completion of a timed-out
|
||||
* script.
|
||||
*/
|
||||
private static final int BACKOFF_MSEC = 100;
|
||||
|
||||
/**
|
||||
* Resource timeout.
|
||||
*/
|
||||
private final int defaultTimeoutSec;
|
||||
|
||||
/**
|
||||
* Resource flag value.
|
||||
*/
|
||||
private final boolean defaultUseStdErrFlag;
|
||||
|
||||
/**
|
||||
* Resource set up exit status.
|
||||
*/
|
||||
private final int defaultSetupExit;
|
||||
|
||||
/**
|
||||
* Constructor setup roleId and resource property values.
|
||||
*/
|
||||
public RemoteScriptRunHandler() {
|
||||
// The role id in the common remoteScriptAdminRoles.xml
|
||||
super("remote.script.run");
|
||||
|
||||
// Set up default values.
|
||||
String defaultTimeoutStr = System.getProperty(
|
||||
RemoteScriptConstants.scriptTimeoutKey,
|
||||
RemoteScriptConstants.scriptTimeoutDefault);
|
||||
int defaultTimeoutSec = -1;
|
||||
try {
|
||||
defaultTimeoutSec = Integer.parseInt(defaultTimeoutStr);
|
||||
} catch (NumberFormatException ex) {
|
||||
defaultTimeoutSec = -1;
|
||||
} finally {
|
||||
if (defaultTimeoutSec <= 0) {
|
||||
defaultTimeoutSec = Integer
|
||||
.parseInt(RemoteScriptConstants.scriptTimeoutDefault);
|
||||
}
|
||||
this.defaultTimeoutSec = defaultTimeoutSec;
|
||||
}
|
||||
|
||||
String defaultUseStdErr = System.getProperty(
|
||||
RemoteScriptConstants.scriptUseStdErrKey,
|
||||
RemoteScriptConstants.scriptUseStdErrDefault);
|
||||
this.defaultUseStdErrFlag = Boolean.parseBoolean(defaultUseStdErr);
|
||||
|
||||
String defaultSetupErrorStr = System.getProperty(
|
||||
RemoteScriptConstants.scriptSetupErrrorKey,
|
||||
RemoteScriptConstants.scriptSetUpErrorDefault);
|
||||
int defaultSetupExit = 0;
|
||||
try {
|
||||
defaultSetupExit = Integer.parseInt(defaultSetupErrorStr);
|
||||
|
||||
} catch (NumberFormatException ex) {
|
||||
defaultSetupExit = 0;
|
||||
} finally {
|
||||
if (defaultSetupExit <= 0) {
|
||||
defaultSetupExit = Integer
|
||||
.parseInt(RemoteScriptConstants.scriptSetUpErrorDefault);
|
||||
}
|
||||
this.defaultSetupExit = defaultSetupExit;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see
|
||||
* com.raytheon.uf.common.serialization.comm.IRequestHandler#handleRequest
|
||||
* (com.raytheon.uf.common.serialization.comm.IServerRequest)
|
||||
*/
|
||||
@Override
|
||||
public Object handleRequest(RemoteScriptRequest request) throws Exception {
|
||||
return super.handleRequest(request);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see
|
||||
* com.raytheon.uf.edex.remote.script.RemoteScriptHandler#performRequest
|
||||
* (com.raytheon.uf.common.remote.script.RemoteScriptRequest)
|
||||
*/
|
||||
public Object performRequest(RemoteScriptRequest request) {
|
||||
RemoteScriptRunRequest req = (RemoteScriptRunRequest) request;
|
||||
RemoteScriptRunResponse result = new RemoteScriptRunResponse();
|
||||
|
||||
Map<String, String> propMap = req.getPropertyMap();
|
||||
|
||||
String timeoutValue = propMap
|
||||
.get(RemoteScriptConstants.scriptTimeoutKey);
|
||||
int timeoutSec = -1;
|
||||
if (timeoutValue == null) {
|
||||
timeoutSec = defaultTimeoutSec;
|
||||
} else {
|
||||
try {
|
||||
timeoutSec = Integer.parseInt(timeoutValue);
|
||||
if (timeoutSec <= 0) {
|
||||
timeoutSec = defaultTimeoutSec;
|
||||
}
|
||||
} catch (NumberFormatException ex) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
String.format("Bad timeout value %s", timeoutValue));
|
||||
timeoutSec = defaultTimeoutSec;
|
||||
} finally {
|
||||
if (timeoutSec <= 0) {
|
||||
timeoutSec = defaultTimeoutSec;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
long timeout = timeoutSec * TimeUtil.MILLIS_PER_SECOND;
|
||||
|
||||
String useStdErrString = propMap
|
||||
.get(RemoteScriptConstants.scriptUseStdErrKey);
|
||||
|
||||
boolean useStdErr = defaultUseStdErrFlag;
|
||||
if (useStdErrString != null) {
|
||||
useStdErr = Boolean.parseBoolean(useStdErrString);
|
||||
}
|
||||
|
||||
String setupExitValue = propMap
|
||||
.get(RemoteScriptConstants.scriptSetupErrrorKey);
|
||||
int setupExit = -1;
|
||||
|
||||
if (setupExitValue == null) {
|
||||
setupExit = defaultSetupExit;
|
||||
} else {
|
||||
|
||||
try {
|
||||
setupExit = Integer.parseInt(setupExitValue);
|
||||
} catch (NumberFormatException ex) {
|
||||
statusHandler.handle(Priority.PROBLEM, String.format(
|
||||
"Bad setup Error exit value %s", setupExitValue));
|
||||
setupExit = defaultSetupExit;
|
||||
} finally {
|
||||
if (setupExit <= 0) {
|
||||
setupExit = defaultSetupExit;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<String> arguments = req.getScriptArguments();
|
||||
String script = req.getScript();
|
||||
LocalizationContext context = req.getContext();
|
||||
|
||||
IPathManager pm = PathManagerFactory.getPathManager();
|
||||
String name = scriptsDirectory + IPathManager.SEPARATOR + script;
|
||||
LocalizationFile lFile = pm.getLocalizationFile(context, name);
|
||||
File file = lFile.getFile();
|
||||
File dir = file.getParentFile();
|
||||
|
||||
if (!file.canExecute()) {
|
||||
String message = String.format("Not an executable script: \"%s\".",
|
||||
lFile);
|
||||
return sendMessage(result, message, useStdErr, setupExit);
|
||||
}
|
||||
|
||||
int maxTimeout = HttpClient.getInstance().getSocketTimeout()
|
||||
- BACKOFF_MSEC;
|
||||
if (maxTimeout <= 0) {
|
||||
String message = String
|
||||
.format("HttpClient's socket timeout of %d msec not enough time to run a remote script.",
|
||||
HttpClient.getInstance().getSocketTimeout());
|
||||
return sendMessage(result, message, useStdErr, setupExit);
|
||||
} else if (timeout > maxTimeout) {
|
||||
timeout = maxTimeout;
|
||||
}
|
||||
|
||||
List<String> args = new ArrayList<String>();
|
||||
args.add(file.getAbsolutePath());
|
||||
if (arguments != null && (arguments.size() > 0)) {
|
||||
args.addAll(arguments);
|
||||
}
|
||||
|
||||
ProcessBuilder pb = new ProcessBuilder(args);
|
||||
pb.redirectErrorStream(!useStdErr);
|
||||
pb.directory(dir);
|
||||
Process p = null;
|
||||
RunProcess rp = RunProcess.getRunProcess();
|
||||
String errorMessage = null;
|
||||
|
||||
// TODO - The timeout/destroy should be placed in RunProcess along with
|
||||
// limiting the size of stdout/stderr.
|
||||
try {
|
||||
p = pb.start();
|
||||
rp.setProcess(p);
|
||||
synchronized (rp) {
|
||||
rp.wait(timeout);
|
||||
if (!rp.isExecComplete()) {
|
||||
p.destroy();
|
||||
result.setTimedOut(true);
|
||||
rp.notify();
|
||||
errorMessage = "Script timed out.";
|
||||
}
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
errorMessage = "Problem running script: "
|
||||
+ ex.getLocalizedMessage().trim();
|
||||
statusHandler.handle(Priority.PROBLEM, errorMessage, ex);
|
||||
|
||||
} finally {
|
||||
if (p != null) {
|
||||
result.setOutput(rp.getStdout());
|
||||
result.setError(rp.getStderr());
|
||||
}
|
||||
result.setExitStatus(rp.waitFor());
|
||||
if (errorMessage != null) {
|
||||
if (useStdErr) {
|
||||
result.setError(result.getError() + "\n" + errorMessage);
|
||||
} else {
|
||||
result.setOutput(result.getOutput() + "\n" + errorMessage);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Report a problem in running the script.
|
||||
*
|
||||
* @param result
|
||||
* @param message
|
||||
* @param useStdErr
|
||||
* @param setupExit
|
||||
* @return result
|
||||
*/
|
||||
private RemoteScriptRunResponse sendMessage(RemoteScriptRunResponse result,
|
||||
String message, boolean useStdErr, int setupExit) {
|
||||
statusHandler.handle(Priority.PROBLEM, message);
|
||||
|
||||
if (useStdErr) {
|
||||
result.setError(message);
|
||||
} else {
|
||||
result.setOutput(message);
|
||||
}
|
||||
|
||||
result.setExitStatus(setupExit);
|
||||
return result;
|
||||
}
|
||||
}
|
|
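The timeout clamping done above can be isolated as a small sketch; the class name is a placeholder and the 100 ms back-off matches BACKOFF_MSEC:

public class TimeoutClampExample {
    /** Clamp the requested timeout below the HTTP socket timeout, minus a 100 ms back-off. */
    public static long clampTimeoutMillis(long requestedMillis, int socketTimeoutMillis) {
        long maxTimeout = socketTimeoutMillis - 100L;
        if (maxTimeout <= 0) {
            throw new IllegalStateException(
                    "Socket timeout too small to run a remote script");
        }
        return Math.min(requestedMillis, maxTimeout);
    }
}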
@ -0,0 +1,20 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<nwsRoleData xmlns:ns2="group">
|
||||
<application>Remote Script</application>
|
||||
<!-- Remote Script List permission.-->
|
||||
<permission id="remote.script.list">
|
||||
<description>
|
||||
This permission allows the user to retrieve a listing of remote scripts.
|
||||
</description>
|
||||
</permission>
|
||||
<!-- Remote Script Execute permission. -->
|
||||
<permission id="remote.script.run">
|
||||
<description>
|
||||
This permission allows the user to execute a remote script.
|
||||
</description>
|
||||
</permission>
|
||||
<user userId="ALL">
|
||||
<userPermission>remote.script.list</userPermission>
|
||||
<userPermission>remote.script.run</userPermission>
|
||||
</user>
|
||||
</nwsRoleData>
|
175
edexOsgi/com.raytheon.uf.tools.cli/impl/src/pgen/ActivityUtil.py
Normal file
175
edexOsgi/com.raytheon.uf.tools.cli/impl/src/pgen/ActivityUtil.py
Normal file
|
@ -0,0 +1,175 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
##
|
||||
# This script is a collection of utility functions to be used for extracting PGEN
|
||||
# products from EDEX and to store PGEN activities to EDEX.
|
||||
#
|
||||
# Users can override the default EDEX server and port name by specifying them
|
||||
# in the $DEFAULT_HOST and $DEFAULT_PORT shell environment variables.
|
||||
#
|
||||
##
|
||||
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
import lib.CommHandler as CH
|
||||
|
||||
class ActivityUtil:
|
||||
|
||||
#
|
||||
# Sends a CatalogQuery to the EDEX uEngine to get a list of
|
||||
# PGEN Activity Types, Subtypes, Labels, refTimes, and associated
|
||||
# dataURIs in the pgen database tables.
|
||||
#
|
||||
def getActivityMap(self):
|
||||
script='''import CatalogQuery
|
||||
query = CatalogQuery.CatalogQuery("pgen")
|
||||
query.addReturnedField("activityType")
|
||||
query.addReturnedField("activitySubtype")
|
||||
query.addReturnedField("activityLabel")
|
||||
query.addReturnedField("dataTime.refTime")
|
||||
query.addReturnedField("activityName")
|
||||
query.addReturnedField("dataURI")
|
||||
return query.execute()'''
|
||||
|
||||
service = '/services/pyproductjaxb'
|
||||
host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
port = os.getenv("DEFAULT_PORT", "9581")
|
||||
connection=str(host+":"+port)
|
||||
ch = CH.CommHandler(connection,service)
|
||||
ch.process(script)
|
||||
|
||||
if not ch.isGoodStatus():
|
||||
print ch.formatResponse()
|
||||
exit(1)
|
||||
|
||||
return self.__generateMap( ch.getContents() )
|
||||
|
||||
#
|
||||
# Generates a map of activity types/subtypes, labels, refTimes, and dataURIs from
|
||||
# the XML returned from EDEX uEngine
|
||||
#
|
||||
# The map is a dictionary (dict) of Activity Types in form of "type(subtype)" whose values
|
||||
# are a list of dicts which have keys "activityType", "activityLabel", "dataTime.refTime",
|
||||
# and "dataURI".
|
||||
#
|
||||
def __generateMap(self, xml):
|
||||
aMap = dict()
|
||||
tree = ET.fromstring(xml)
|
||||
for item in tree.iter('items'):
|
||||
record = dict()
|
||||
for attr in item.iter('attributes'):
|
||||
record.update( {attr.attrib['field'] : attr.attrib['value'] } )
|
||||
|
||||
atype = record['activityType']
|
||||
stype = record['activitySubtype']
|
||||
if ( stype != None and len(stype.lstrip()) > 0):
|
||||
atype = atype + "(" + stype.lstrip() + ")"
|
||||
|
||||
if aMap.has_key(atype):
|
||||
aMap[atype].append(record)
|
||||
else:
|
||||
aMap.update( {atype: [record]} )
|
||||
|
||||
return aMap
|
||||
|
||||
#
|
||||
# Compare if a command line string matches a string in an activity.
|
||||
# This uses string methods.
|
||||
#
|
||||
def matcher(self, cmdstr, activitystr):
|
||||
|
||||
matched = False
|
||||
if cmdstr == None:
|
||||
matched = True
|
||||
else:
|
||||
if activitystr == None:
|
||||
matched = False;
|
||||
else:
|
||||
realstr = cmdstr.strip("*")
|
||||
if ( cmdstr.startswith("*") ):
|
||||
if ( cmdstr.endswith("*") ):
|
||||
if ( activitystr.find( realstr ) >= 0 ):
|
||||
matched = True
|
||||
else:
|
||||
if activitystr.endswith( realstr):
|
||||
matched = True
|
||||
elif cmdstr.endswith("*"):
|
||||
if activitystr.startswith( realstr):
|
||||
matched = True
|
||||
else:
|
||||
if ( activitystr == cmdstr ):
|
||||
matched = True
|
||||
|
||||
return matched
|
||||
|
||||
#
|
||||
# Compare if a command line string matches a string in an activity.
|
||||
# This uses regular expression matching.
|
||||
#
|
||||
# cmdstr - input from command line, could use "*" anywhere to match one or more character.
|
||||
# activitystr - value saved in PGEN DB for an activity, such as type, label, ...
|
||||
#
|
||||
def stringMatcher(self, cmdstr, activitystr):
|
||||
|
||||
matched = False
|
||||
|
||||
if cmdstr == None:
|
||||
matched = True
|
||||
elif activitystr == None:
|
||||
matched = False
|
||||
else:
|
||||
#parenthese should be escaped.
|
||||
ps = cmdstr.replace("(", "\(")
|
||||
pe = ps.replace(")", "\)")
|
||||
|
||||
# "*" could match any one or more characters.
|
||||
pn = pe.replace("*", "(.*)")
|
||||
|
||||
mb = re.match(pn, activitystr)
|
||||
if mb != None:
|
||||
matched = True
|
||||
|
||||
return matched
|
||||
|
||||
#
|
||||
# This method sends a CatalogQuery request to the EDEX uEngine
|
||||
# for the dataURI associated with the given activity type and label
|
||||
#
|
||||
def getDataURI( self, atype, label):
|
||||
|
||||
script='''import CatalogQuery
|
||||
query = CatalogQuery.CatalogQuery("pgen")
|
||||
query.addConstraint("activityType","{0}","=")
|
||||
query.addConstraint("activityLabel","{1}","=")
|
||||
query.addReturnedField("dataURI")
|
||||
query.addReturnedField("dataTime.refTime")
|
||||
return query.execute()'''.format(atype,label)
|
||||
|
||||
|
||||
service = '/services/pyproductjaxb'
|
||||
host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
port = os.getenv("DEFAULT_PORT", "9581")
|
||||
connection=str(host+":"+port)
|
||||
ch = CH.CommHandler(connection,service)
|
||||
ch.process(script)
|
||||
|
||||
if not ch.isGoodStatus():
|
||||
print ch.formatResponse()
|
||||
exit(1)
|
||||
|
||||
logger.debug( ch.getContents() )
|
||||
return self.__parseResponse( ch.getContents() )
|
||||
|
||||
#
|
||||
# Parses the XML response from the uEngine and extracts
|
||||
# the value for the dataURI field. If multiple are returned, the last
|
||||
# one is used.
|
||||
#
|
||||
def __parseResponse(self, xml):
|
||||
tree = ET.fromstring(xml)
|
||||
for attr in tree.iter('attributes'):
|
||||
if attr.attrib['field'] == 'dataURI':
|
||||
duri = attr.attrib['value']
|
||||
|
||||
return duri
|
|
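ActivityUtil above exposes getActivityMap(), whose keys are "TYPE" or "TYPE(SUBTYPE)" strings, and stringMatcher(), which turns "*" into a regex wildcard and escapes parentheses. A short usage sketch under those assumptions follows; the helper and the example pattern are illustrative and not part of this patch.

    import ActivityUtil

    def list_matching_activities(label_pattern):
        util = ActivityUtil.ActivityUtil()
        amap = util.getActivityMap()       # keys are "TYPE" or "TYPE(SUBTYPE)"
        for key in sorted(amap.keys()):
            for rec in amap[key]:
                # "*" matches any run of characters; a None pattern matches all
                if util.stringMatcher(label_pattern, rec["activityLabel"]):
                    print key, rec["activityLabel"], rec["dataTime.refTime"]

    # list_matching_activities("*CCFP*")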
@ -11,6 +11,7 @@ class ProductRetriever:
|
|||
def __init__(self,dataURI,label):
|
||||
self.dataURI = dataURI
|
||||
self.label = label
|
||||
self.fullpath = False
|
||||
self.outdir = os.getcwd()
|
||||
self.host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
self.port = os.getenv("DEFAULT_PORT", "9581")
|
||||
|
@ -18,6 +19,9 @@ class ProductRetriever:
|
|||
|
||||
def setOutputDir(self, outdir):
|
||||
self.outdir = outdir
|
||||
|
||||
def setFullpath(self, fullpath):
|
||||
self.fullpath = fullpath
|
||||
|
||||
def _writeout(self,filename,bytes):
|
||||
outname = self.outdir + str(os.sep) + filename
|
||||
|
@ -39,12 +43,18 @@ class ProductRetriever:
|
|||
filename = self.label + ".xml"
|
||||
else:
|
||||
filename = item.getName()
|
||||
|
||||
print "Extracting... " + filename
|
||||
|
||||
if ( self.fullpath ):
|
||||
path = self.dataURI
|
||||
fname = path.replace("/", ".") + "$" + filename
|
||||
filename = fname.lstrip().strip(".").replace("..", ".")
|
||||
|
||||
|
||||
if isinstance(item, StringDataRecord):
|
||||
self._writeout(filename,item.getStringData()[0])
|
||||
elif isinstance(item, ByteDataRecord):
|
||||
self._writeout(filename,item.getByteData())
|
||||
print "Extracted... " + filename
|
||||
|
||||
return resp
|
||||
|
||||
|
|
@ -2,9 +2,9 @@
|
|||
|
||||
##
|
||||
# This script is used to extract PGEN products from EDEX.
|
||||
# It can be run in batch mode by specifying the "-l" and "-t" options on the
|
||||
# command line. Optionally, users can run it in interactive mode by invoking it
|
||||
# with no argument.
|
||||
# It can be run in batch mode by specifying the "-l", "-t", "-d", "-st", "-n", and
|
||||
# "-p" options on the command line. Optionally, users can run it in interactive
|
||||
# mode by invoking it with no argument.
|
||||
#
|
||||
# Users can override the default EDEX server and port name by specifying them
|
||||
# in the $DEFAULT_HOST and $DEFAULT_PORT shell environment variables.
|
||||
|
@ -19,6 +19,7 @@ from Tkinter import *
|
|||
from ufpy import UsageArgumentParser
|
||||
import lib.CommHandler as CH
|
||||
import ProductRetriever
|
||||
import ActivityUtil
|
||||
|
||||
logger = None
|
||||
def __initLogger():
|
||||
|
@ -33,68 +34,53 @@ def __initLogger():
|
|||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
#
|
||||
# Parses command line input and store in "options".
|
||||
#
|
||||
def __parseCommandLine():
|
||||
parser = UsageArgumentParser.UsageArgumentParser(prog='retrieveActivity',description="Retrieve PGEN Activities from EDEX. When invoked without any arguments, retrieveActivity is run in interactive mode.")
|
||||
bgroup = parser.add_argument_group(title='batch',description='For running in scripts and/or batch mode.')
|
||||
|
||||
bgroup.add_argument("-l", action="store", dest="label",
|
||||
bgroup.add_argument("-l*", action="store", dest="label",
|
||||
help="Activity Label being requested",
|
||||
required=False, metavar="label")
|
||||
bgroup.add_argument("-t", action="store", dest="type",
|
||||
bgroup.add_argument("-t*", action="store", dest="type",
|
||||
help="Activity Type being requested",
|
||||
required=False, metavar="type")
|
||||
bgroup.add_argument("-st", action="store", dest="subtype",
|
||||
help="Activity Subtype being requested",
|
||||
required=False, metavar="subtype")
|
||||
bgroup.add_argument("-d*", action="store", dest="reftime",
|
||||
help="Activity Ref Time being requested (YYYY-MM-DD_HH:MM)",
|
||||
required=False, metavar="reftime")
|
||||
bgroup.add_argument("-n*", action="store", dest="name",
|
||||
help="Activity Name being requested",
|
||||
required=False, metavar="name")
|
||||
bgroup.add_argument("-f", action="store", dest="fullpath",
|
||||
help="Write out XML with full path? (Yes/No)",
|
||||
required=False, metavar="fullpath")
|
||||
|
||||
bgroup = parser.add_argument_group(title='Note',description='Pattern match with "*" is allowed for -l, -t, -d, and -n. E.g, -l "*CCFP*3*" will match any activities whose label contains CCFP and 3.')
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
options.interactive = False
|
||||
if options.label == None and options.type == None :
|
||||
options.interactive = True
|
||||
elif options.label == None or options.type == None :
|
||||
print "Must enter values for both arguments -l and -t"
|
||||
exit(0)
|
||||
|
||||
if (options.label == None and options.type == None and
|
||||
options.reftime == None and options.subtype == None and
|
||||
options.fullpath == None and options.name == None):
|
||||
options.interactive = True
|
||||
else:
|
||||
if (options.label == None and options.type == None and
|
||||
options.reftime == None and options.name == None):
|
||||
print "Must enter values for at least one of -l, -t, -d, or -n"
|
||||
exit(0)
|
||||
|
||||
logger.debug("Command-line arguments: " + str(options))
|
||||
return options
|
||||
|
||||
#
|
||||
# This method sends a CatalogQuery request to the EDEX uEngine
|
||||
# for the dataURI associated with the given activity type and label
|
||||
# Main program.
|
||||
#
|
||||
def __getDataURI( type, label):
|
||||
script='''import CatalogQuery
|
||||
query = CatalogQuery.CatalogQuery("pgen")
|
||||
query.addConstraint("activityType","{0}","=")
|
||||
query.addConstraint("activityLabel","{1}","=")
|
||||
query.addReturnedField("dataURI")
|
||||
query.addReturnedField("dataTime.refTime")
|
||||
return query.execute()'''.format(type,label)
|
||||
|
||||
service = '/services/pyproductjaxb'
|
||||
host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
port = os.getenv("DEFAULT_PORT", "9581")
|
||||
connection=str(host+":"+port)
|
||||
ch = CH.CommHandler(connection,service)
|
||||
ch.process(script)
|
||||
|
||||
if not ch.isGoodStatus():
|
||||
print ch.formatResponse()
|
||||
exit(1)
|
||||
|
||||
logger.debug( ch.getContents() )
|
||||
return __parseResponse( ch.getContents() )
|
||||
|
||||
#
|
||||
# Parses the XML response from the uEngine and extracts
|
||||
# the value for the dataURI field. If multiple are returned, the last
|
||||
# one is used.
|
||||
#
|
||||
def __parseResponse(xml):
|
||||
tree = ET.fromstring(xml)
|
||||
for attr in tree.iter('attributes'):
|
||||
if attr.attrib['field'] == 'dataURI':
|
||||
duri = attr.attrib['value']
|
||||
|
||||
return duri
|
||||
|
||||
def main():
|
||||
__initLogger()
|
||||
logger.info("Starting retrieveActivity.")
|
||||
|
@ -109,21 +95,51 @@ def main():
|
|||
app.mainloop()
|
||||
root.destroy()
|
||||
else:
|
||||
# Retrieve products for given activity type and label
|
||||
logger.info("looking for Product: " + options.type + " - " + options.label)
|
||||
dataURI = __getDataURI(options.type, options.label)
|
||||
logger.debug("Found dataURI = " + dataURI)
|
||||
# Retrieve all activities and build a map of records using
|
||||
# type(subtype) as key.
|
||||
mu = ActivityUtil.ActivityUtil()
|
||||
activityMap = mu.getActivityMap()
|
||||
|
||||
reqtype = None
|
||||
if ( options.type != None ):
|
||||
reqtype = options.type;
|
||||
if ( options.subtype != None ) :
|
||||
reqtype = options.type + "(" + options.subtype + ")"
|
||||
|
||||
records = []
|
||||
for key in activityMap.iterkeys():
|
||||
recs = activityMap[key]
|
||||
for rec in recs:
|
||||
if ( mu.stringMatcher(options.label, rec["activityLabel"]) and
|
||||
mu.stringMatcher(reqtype, key ) and
|
||||
mu.stringMatcher(options.name, rec["activityName"] ) ):
|
||||
#Remove sec.msec from record's refTime
|
||||
dbRefTime = rec["dataTime.refTime"]
|
||||
dotIndex = dbRefTime.rfind(":")
|
||||
if ( dotIndex > 0 ):
|
||||
shortTime = dbRefTime[:dotIndex]
|
||||
else:
|
||||
shortTime = dbRefTime
|
||||
|
||||
#Replace the "_" with a whitespace in reftime.
|
||||
optionTime = options.reftime.replace("_", " ")
|
||||
|
||||
if ( mu.stringMatcher( optionTime, shortTime ) ):
|
||||
records.append( rec )
|
||||
|
||||
for rec in records:
|
||||
pr = ProductRetriever.ProductRetriever(rec["dataURI"], rec["activityLabel"])
|
||||
if options.fullpath != None and options.fullpath.upper().startswith("Y"):
|
||||
pr.setFullpath(True)
|
||||
pr.getProducts()
|
||||
|
||||
pr = ProductRetriever.ProductRetriever(dataURI, options.label)
|
||||
outdir = os.getcwd() + str(os.sep) + options.type + str(os.sep) + options.label + str(os.sep)
|
||||
#pr.setOutputDir(outdir)
|
||||
pr.getProducts()
|
||||
|
||||
#print "Products were written to directory: " + outdir
|
||||
logger.info("retrieveActivity is complete.")
|
||||
|
||||
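# --- Illustrative sketch, not part of retrieveActivity.py: how the refTime
# --- comparison above lines up. The database refTime keeps seconds
# --- ("2014-07-07 12:00:00.0") while the -d option uses "_" and no seconds,
# --- so both sides are normalized to "YYYY-MM-DD HH:MM" before matching.
def _normalize_times(db_ref_time, option_ref_time):
    colon = db_ref_time.rfind(":")
    short_db = db_ref_time[:colon] if colon > 0 else db_ref_time
    short_opt = option_ref_time.replace("_", " ")
    return short_db, short_opt
# _normalize_times("2014-07-07 12:00:00.0", "2014-07-07_12:00")
#     -> ("2014-07-07 12:00", "2014-07-07 12:00")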
#
|
||||
# Interactive GUI for PGEN activity retrieval
|
||||
#
|
||||
class RetrieveGui(Frame):
|
||||
""" Interactive GUI for PGEN product retrieval """
|
||||
""" Interactive GUI for PGEN activity retrieval """
|
||||
|
||||
def __init__(self, master=None):
|
||||
""" Initialize Frame and create widgets """
|
||||
|
@ -136,15 +152,16 @@ class RetrieveGui(Frame):
|
|||
# if an activity type and label have been selected, get products and write them out.
|
||||
if len(self.typeList.curselection()) != 0 and len(self.nameList.curselection()) != 0:
|
||||
type = self.typeList.get(self.typeList.curselection())
|
||||
label = self.nameList.get(self.nameList.curselection())
|
||||
labelindex = int(self.nameList.curselection()[0])
|
||||
dataURI = self.activityMap[type][labelindex]['dataURI']
|
||||
|
||||
pr = ProductRetriever.ProductRetriever(dataURI, label)
|
||||
#outdir = os.getcwd() + str(os.sep) + options.type + str(os.sep) + options.label + str(os.sep)
|
||||
#pr.setOutputDir(outdir)
|
||||
pr.getProducts()
|
||||
|
||||
items = self.nameList.curselection()
|
||||
for i in items :
|
||||
idx = int(i)
|
||||
label = self.nameList.get(idx)
|
||||
dataURI = self.activityMap[type][idx]['dataURI']
|
||||
|
||||
pr = ProductRetriever.ProductRetriever(dataURI, label)
|
||||
pr.getProducts()
|
||||
|
||||
def createWidgets(self):
|
||||
activityType = Label(self)
|
||||
activityType["text"] = "Activity Type"
|
||||
|
@ -172,7 +189,7 @@ class RetrieveGui(Frame):
|
|||
frame2 = Frame(self)
|
||||
vscrollbar2 = Scrollbar(frame2, orient=VERTICAL)
|
||||
hscrollbar2 = Scrollbar(frame2, orient=HORIZONTAL)
|
||||
self.nameList = Listbox(frame2,yscrollcommand=vscrollbar2.set,xscrollcommand=hscrollbar2.set,exportselection=0, width=50,height=15,bg="white")
|
||||
self.nameList = Listbox(frame2,yscrollcommand=vscrollbar2.set,xscrollcommand=hscrollbar2.set,exportselection=0, width=50,height=15,bg="white", selectmode=EXTENDED)
|
||||
vscrollbar2.config(command=self.nameList.yview)
|
||||
hscrollbar2.config(command=self.nameList.xview)
|
||||
vscrollbar2.pack(side=RIGHT, fill=Y)
|
||||
|
@ -197,7 +214,7 @@ class RetrieveGui(Frame):
|
|||
# Get all Activity Types and Labels from EDEX for use in selection ListBoxes.
|
||||
# Insert list of Types in Type Listbox
|
||||
#
|
||||
self.activityMap = self.__getActivityMap()
|
||||
self.activityMap = ActivityUtil.ActivityUtil().getActivityMap()
|
||||
self.typeList.delete(0,END)
|
||||
for key in self.activityMap.iterkeys():
|
||||
self.typeList.insert(END,key)
|
||||
|
@ -225,62 +242,8 @@ class RetrieveGui(Frame):
|
|||
def typeList_has_changed(self, index):
|
||||
self.nameList.delete(0,END)
|
||||
for label in self.activityMap[ self.typeList.get(index) ]:
|
||||
#print label
|
||||
self.nameList.insert(END, label['activityLabel'])
|
||||
|
||||
#
|
||||
# Sends a CatalogQuery to the EDEX uEngine to get a list of
|
||||
# PGEN Activity TYpes, Labels, and associated dataURIs
|
||||
# in the pgen database tables.
|
||||
#
|
||||
def __getActivityMap(self):
|
||||
script='''import CatalogQuery
|
||||
query = CatalogQuery.CatalogQuery("pgen")
|
||||
query.addReturnedField("activityType")
|
||||
query.addReturnedField("activityLabel")
|
||||
query.addReturnedField("dataURI")
|
||||
return query.execute()'''
|
||||
|
||||
service = '/services/pyproductjaxb'
|
||||
host = os.getenv("DEFAULT_HOST", "localhost")
|
||||
port = os.getenv("DEFAULT_PORT", "9581")
|
||||
connection=str(host+":"+port)
|
||||
ch = CH.CommHandler(connection,service)
|
||||
ch.process(script)
|
||||
|
||||
if not ch.isGoodStatus():
|
||||
print ch.formatResponse()
|
||||
exit(1)
|
||||
|
||||
logger.debug( ch.getContents() )
|
||||
return self.__generateMap( ch.getContents() )
|
||||
|
||||
#
|
||||
# Generates a map of activity types, label, and dataURIs from
|
||||
# the XML returned from EDEX uEngine for use in the activity type and label
|
||||
# Listboxes.
|
||||
#
|
||||
# The map is a dictionary (dict) of Activity Types whose values are a list of dicts
|
||||
# which have keys "activityType", "activityLabel", and "dataURI".
|
||||
#
|
||||
def __generateMap(self, xml):
|
||||
aMap = dict()
|
||||
tree = ET.fromstring(xml)
|
||||
for item in tree.iter('items'):
|
||||
#print item.attrib['key']
|
||||
record = dict()
|
||||
for attr in item.iter('attributes'):
|
||||
record.update( {attr.attrib['field'] : attr.attrib['value'] } )
|
||||
#print record
|
||||
|
||||
atype = record['activityType']
|
||||
if aMap.has_key(atype):
|
||||
aMap[atype].append(record)
|
||||
else:
|
||||
aMap.update( {atype: [record]} )
|
||||
|
||||
return aMap
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
|
@ -56,14 +56,69 @@ def __parseCommandLine():
|
|||
logger.debug("Command-line arguments: " + str(options))
|
||||
return options
|
||||
|
||||
def __getActivityInfo(options):
|
||||
#
|
||||
# create an ActivityInfo object from command line input and
|
||||
# activityXML. If no input was found from command line, the
|
||||
# info in activityXML will be used.
|
||||
#
|
||||
def __getActivityInfo(xml, options):
|
||||
|
||||
ainfo = ActivityInfo()
|
||||
ainfo.setActivityLabel(options.filename)
|
||||
ainfo.setActivityName(options.activityName)
|
||||
ainfo.setActivityType(options.activityType)
|
||||
ainfo.setActivitySubtype(options.activitySubtype)
|
||||
ainfo.setForecaster(options.forecaster)
|
||||
ainfo.setSite(options.site)
|
||||
|
||||
tree = ET.fromstring(xml)
|
||||
product = tree.find('Product')
|
||||
|
||||
# strip the path from file and use it as activityLabel
|
||||
|
||||
fullname = options.filename
|
||||
lastslash = fullname.rfind("/")
|
||||
filename = fullname
|
||||
if ( lastslash >=0 ):
|
||||
filename = fullname[lastslash+1:]
|
||||
|
||||
ainfo.setActivityLabel(filename)
|
||||
|
||||
if ( options.forecaster != None ):
|
||||
ainfo.setForecaster(options.forecaster)
|
||||
else:
|
||||
ainfo.setForecaster(product.attrib['forecaster'])
|
||||
|
||||
if ( options.site != None ):
|
||||
ainfo.setSite(options.site)
|
||||
else:
|
||||
ainfo.setSite(product.attrib['center'])
|
||||
|
||||
# set activity type and subtype
|
||||
if ( options.activityType != None ):
|
||||
ainfo.setActivityType(options.activityType)
|
||||
|
||||
if ( options.activitySubtype != None ):
|
||||
ainfo.setActivitySubtype(options.activitySubtype)
|
||||
else:
|
||||
ainfo.setActivitySubtype("")
|
||||
else:
|
||||
if (product.attrib['type'].find("(") < 0 ):
|
||||
ainfo.setActivityType( product.attrib['type'] )
|
||||
ainfo.setActivitySubtype( "" )
|
||||
else:
|
||||
fulltyp = product.attrib['type']
|
||||
start = fulltyp.find("(")
|
||||
end = fulltyp.find(")")
|
||||
ainfo.setActivityType( fulltyp[0:start] )
|
||||
ainfo.setActivitySubtype( fulltyp[start+1:end] )
|
||||
|
||||
# set activityName
|
||||
if ( options.activityName != None ):
|
||||
ainfo.setActivityName(options.activityName)
|
||||
else:
|
||||
if ( options.activityType != None ):
|
||||
aname = options.activityType;
|
||||
if ( options.activitySubtype != None ):
|
||||
aname = aname + "(" + options.activitySubtype + ")"
|
||||
ainfo.setActivityName( aname )
|
||||
else:
|
||||
ainfo.setActivityName( product.attrib['name'] )
|
||||
|
||||
return ainfo
|
||||
|
||||
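# --- Illustrative sketch, not part of storeActivity.py: the type/subtype
# --- handling above splits a stored type such as "AIRMET(SIERRA)" into its
# --- activityType and activitySubtype parts. Example values are made up.
def _split_type(fulltype):
    start = fulltype.find("(")
    end = fulltype.find(")")
    if start < 0:
        return fulltype, ""
    return fulltype[0:start], fulltype[start + 1:end]
# _split_type("AIRMET(SIERRA)") -> ("AIRMET", "SIERRA")
# _split_type("CCFP")           -> ("CCFP", "")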
# Update Product tag attributes with options given on command line
|
||||
|
@ -72,9 +127,13 @@ def __updateXML(xml, options):
|
|||
product = tree.find('Product')
|
||||
if options.activityName != None:
|
||||
product.attrib['name'] = options.activityName
|
||||
|
||||
|
||||
if options.activityType != None:
|
||||
product.attrib['type'] = options.activityType
|
||||
if options.activitySubtype != None:
|
||||
ntype = options.activityType + '(' + options.activitySubtype + ')'
|
||||
product.attrib['type'] = ntype
|
||||
else:
|
||||
product.attrib['type'] = options.activityType
|
||||
|
||||
if options.filename != None:
|
||||
product.attrib['outputFile'] = options.filename
|
||||
|
@ -84,11 +143,12 @@ def __updateXML(xml, options):
|
|||
|
||||
if options.site != None:
|
||||
product.attrib['center'] = options.site
|
||||
|
||||
return ET.tostring(tree)
|
||||
|
||||
def main():
|
||||
__initLogger()
|
||||
logger.info("Starting retrieveActivity.")
|
||||
logger.info("Starting storeActivity.")
|
||||
options = __parseCommandLine()
|
||||
|
||||
# read in XML from input file
|
||||
|
@ -98,7 +158,7 @@ def main():
|
|||
|
||||
# generate an activityInfo object and update XML with options
|
||||
# from command line
|
||||
actinfo = __getActivityInfo(options)
|
||||
actinfo = __getActivityInfo(activityXML, options)
|
||||
activityXML = __updateXML(activityXML, options)
|
||||
|
||||
# Store Activity to EDEX
|
||||
|
@ -110,4 +170,4 @@ def main():
|
|||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
|
@ -56,6 +56,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
|||
* PluginDataObject.
|
||||
* July 29, 2013 1028 ghull add AwwReportType enum
|
||||
* Feb 11, 2014 2784 rferrel Remove override of setIdentifier.
|
||||
* July 07, 2014 ??? D. Sushon add handling for TORNADO_WATCH in getReportType(..)
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -111,8 +112,12 @@ public class AwwRecord extends PluginDataObject {
|
|||
if (rtStr.equals("THUNDERSTORM_REPORT")) {
|
||||
return SEVERE_THUNDERSTORM_WATCH;
|
||||
}
|
||||
if (rtStr.endsWith("STATUS REPORT")) {
|
||||
if (rtStr.equals("TORNADO_REPORT")) {
|
||||
return TORNADO_WATCH;
|
||||
}
|
||||
if (rtStr.endsWith("STATUS_REPORT")) {
|
||||
// ??? return AwwReportType.SEVERE_WEATHER_STATUS_NOTIFICATION
|
||||
return AwwReportType.STATUS_REPORT;
|
||||
}
|
||||
// WSTM is looking for
|
||||
if (rtStr.equals("WINTER_STORM")) {
|
||||
|
|
|
@ -7,12 +7,12 @@ Bundle-RequiredExecutionEnvironment: JavaSE-1.6
|
|||
Require-Bundle: com.raytheon.edex.common;bundle-version="1.12.1174",
|
||||
javax.persistence;bundle-version="1.0.0"
|
||||
Export-Package: gov.noaa.nws.ncep.common.dataplugin.geomag,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag.calculation,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag.dao,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag.exception,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag.request,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag.table,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag.util
|
||||
Import-Package: com.raytheon.uf.common.localization,
|
||||
gov.noaa.nws.ncep.common.dataplugin.geomag,
|
||||
com.raytheon.uf.common.serialization.comm,
|
||||
org.apache.commons.logging
|
||||
|
||||
|
||||
|
|
|
@ -9,7 +9,6 @@ import javax.persistence.GenerationType;
|
|||
import javax.persistence.Id;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
//import javax.persistence.UniqueConstraint;
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
import javax.xml.bind.annotation.XmlAccessorType;
|
||||
import javax.xml.bind.annotation.XmlAttribute;
|
||||
|
@ -22,14 +21,17 @@ import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
|
|||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
//import javax.persistence.UniqueConstraint;
|
||||
|
||||
/**
|
||||
* Record implementation for geomag avg.
|
||||
* Record implementation for geomag avg.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ---------------- --------------------------
|
||||
* 08/14/2013 T989 qzhou Initial creation.
|
||||
* 03/03/2014 qzhou modified get/set
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
|
@ -44,26 +46,26 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
|||
@DynamicSerialize
|
||||
public class GeoMagAvg extends PersistableDataObject<Object> {
|
||||
|
||||
|
||||
/**
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 1L;
|
||||
public static final String ID_GEN = "idgen";
|
||||
|
||||
/** The id */
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public static final String ID_GEN = "idgen";
|
||||
|
||||
/** The id */
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_GEN)
|
||||
private Integer id;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* station code
|
||||
*/
|
||||
@Column
|
||||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private String stationCode;
|
||||
|
||||
|
||||
/**
|
||||
* time tag
|
||||
*/
|
||||
|
@ -71,7 +73,7 @@ public class GeoMagAvg extends PersistableDataObject<Object> {
|
|||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private Date avgTime;
|
||||
|
||||
|
||||
/**
|
||||
* insert time tag
|
||||
*/
|
||||
|
@ -79,54 +81,53 @@ public class GeoMagAvg extends PersistableDataObject<Object> {
|
|||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private Date insertTime;
|
||||
|
||||
|
||||
/**
|
||||
* H data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@XmlAttribute
|
||||
@Column
|
||||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private float hHrAvg;
|
||||
|
||||
/**
|
||||
private float hHrAvg;
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@XmlAttribute
|
||||
@Column
|
||||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private float dHrAvg;
|
||||
|
||||
|
||||
public GeoMagAvg() {
|
||||
|
||||
}
|
||||
|
||||
public void generateId() {
|
||||
|
||||
public GeoMagAvg() {
|
||||
|
||||
}
|
||||
|
||||
public void generateId() {
|
||||
this.id = hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
*/
|
||||
public float getHHrAvg() {
|
||||
public float gethHrAvg() {
|
||||
return hHrAvg;
|
||||
}
|
||||
|
||||
public void setHHrAvg(float hHrAvg) {
|
||||
public void sethHrAvg(float hHrAvg) {
|
||||
this.hHrAvg = hHrAvg;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
*/
|
||||
public float getDHrAvg() {
|
||||
public float getdHrAvg() {
|
||||
return dHrAvg;
|
||||
}
|
||||
|
||||
public void setDHrAvg(float dHrAvg) {
|
||||
public void setdHrAvg(float dHrAvg) {
|
||||
this.dHrAvg = dHrAvg;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return The id
|
||||
*/
|
||||
|
@ -138,7 +139,7 @@ public class GeoMagAvg extends PersistableDataObject<Object> {
|
|||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @return the timeTag
|
||||
*/
|
||||
public Date getAvgTime() {
|
||||
|
@ -148,9 +149,9 @@ public class GeoMagAvg extends PersistableDataObject<Object> {
|
|||
public void setAvgTime(Date avgTime) {
|
||||
this.avgTime = avgTime;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the timeTag
|
||||
* @return the insert time
|
||||
*/
|
||||
public Date getInsertTime() {
|
||||
return insertTime;
|
||||
|
@ -159,7 +160,7 @@ public class GeoMagAvg extends PersistableDataObject<Object> {
|
|||
public void setInsertTime(Date insertTime) {
|
||||
this.insertTime = insertTime;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the stationCode
|
||||
*/
|
||||
|
|
|
@ -22,13 +22,14 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
|||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* Record implementation for geomag k 1 min.
|
||||
* Record implementation for geomag k 1 min.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ---------------- --------------------------
|
||||
* 08/14/2013 T989 qzhou Initial creation.
|
||||
* 03/03/2014 #1110 qzhou modified get/set
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
|
@ -43,25 +44,26 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
|||
@DynamicSerialize
|
||||
public class GeoMagK1min extends PersistableDataObject<Object> {
|
||||
|
||||
/**
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = 1L;
|
||||
public static final String ID_GEN = "idgen";
|
||||
|
||||
/** The id */
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public static final String ID_GEN = "idgen";
|
||||
|
||||
/** The id */
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_GEN)
|
||||
private Integer id;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* station code
|
||||
*/
|
||||
@Column
|
||||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private String stationCode;
|
||||
|
||||
|
||||
/**
|
||||
* time tag
|
||||
*/
|
||||
|
@ -69,7 +71,7 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private Date refTime;
|
||||
|
||||
|
||||
/**
|
||||
* insert time tag
|
||||
*/
|
||||
|
@ -77,110 +79,108 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
@XmlAttribute
|
||||
@DynamicSerializeElement
|
||||
private Date lastUpdate;
|
||||
|
||||
|
||||
/**
|
||||
* H data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private int kestIndex;
|
||||
|
||||
/**
|
||||
private int kestIndex;
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float kestReal;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float kestGamma;
|
||||
|
||||
|
||||
/**
|
||||
* H data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private int hkIndex;
|
||||
|
||||
/**
|
||||
private int hkIndex;
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float hkReal;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float hkGamma;
|
||||
|
||||
|
||||
/**
|
||||
* H data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private int dkIndex;
|
||||
|
||||
/**
|
||||
private int dkIndex;
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float dkReal;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float dkGamma;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private int hCount;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private int dCount;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private int aest;
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
* D data Hour Average
|
||||
*/
|
||||
@Column(length=16)
|
||||
@Column(length = 16)
|
||||
@DynamicSerializeElement
|
||||
private float ks;
|
||||
|
||||
|
||||
|
||||
public GeoMagK1min() {
|
||||
|
||||
}
|
||||
|
||||
public void generateId() {
|
||||
|
||||
public GeoMagK1min() {
|
||||
|
||||
}
|
||||
|
||||
public void generateId() {
|
||||
this.id = hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
|
||||
/**
|
||||
* @return the kestIndex
|
||||
*/
|
||||
public int getKestIndex() {
|
||||
return kestIndex;
|
||||
|
@ -189,9 +189,9 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setKestIndex(int kestIndex) {
|
||||
this.kestIndex = kestIndex;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
* @return the kestReal
|
||||
*/
|
||||
public float getKestReal() {
|
||||
return kestReal;
|
||||
|
@ -200,9 +200,9 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setKestReal(float kestReal) {
|
||||
this.kestReal = kestReal;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
* @return the kestGamma
|
||||
*/
|
||||
public float getKestGamma() {
|
||||
return kestGamma;
|
||||
|
@ -211,95 +211,95 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setKestGamma(float kestGamma) {
|
||||
this.kestGamma = kestGamma;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
|
||||
/**
|
||||
* @return the hkReal
|
||||
*/
|
||||
public float getHkReal() {
|
||||
return hkReal;
|
||||
}
|
||||
|
||||
public void setHkReal(float hkReal) {
|
||||
this.hkReal = hkReal;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the hkGamma
|
||||
*/
|
||||
public float getHkGamma() {
|
||||
return hkGamma;
|
||||
}
|
||||
|
||||
public void setHkGamma(float hkGamma) {
|
||||
this.hkGamma = hkGamma;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the hkIndex
|
||||
*/
|
||||
public int getHkIndex() {
|
||||
return hkIndex;
|
||||
}
|
||||
|
||||
public void setHKIndex(int hkIndex) {
|
||||
public void setHkIndex(int hkIndex) {
|
||||
this.hkIndex = hkIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
*/
|
||||
public float getHKReal() {
|
||||
return hkReal;
|
||||
}
|
||||
|
||||
public void setHKReal(float hkReal) {
|
||||
this.hkReal = hkReal;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
* @return the dkIndex
|
||||
*/
|
||||
public float getHKGamma() {
|
||||
return hkGamma;
|
||||
}
|
||||
|
||||
public void setHKGamma(float hkGamma) {
|
||||
this.hkGamma = hkGamma;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
*/
|
||||
public int getDKIndex() {
|
||||
public int getDkIndex() {
|
||||
return dkIndex;
|
||||
}
|
||||
|
||||
public void setDKIndex(int dkIndex) {
|
||||
public void setDkIndex(int dkIndex) {
|
||||
this.dkIndex = dkIndex;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
* @return the dkReal
|
||||
*/
|
||||
public float getDKReal() {
|
||||
public float getDkReal() {
|
||||
return dkReal;
|
||||
}
|
||||
|
||||
public void setDKReal(float dkReal) {
|
||||
public void setDkReal(float dkReal) {
|
||||
this.dkReal = dkReal;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the hHrAvg
|
||||
* @return the dkGamma
|
||||
*/
|
||||
public float getDKGamma() {
|
||||
public float getDkGamma() {
|
||||
return dkGamma;
|
||||
}
|
||||
|
||||
public void setDKGamma(float dkGamma) {
|
||||
public void setDkGamma(float dkGamma) {
|
||||
this.dkGamma = dkGamma;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
* @return the hCount
|
||||
*/
|
||||
public float getHCount() {
|
||||
public int gethCount() {
|
||||
return hCount;
|
||||
}
|
||||
|
||||
public void setHCount(int hCount) {
|
||||
public void sethCount(int hCount) {
|
||||
this.hCount = hCount;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
* @return the dCount
|
||||
*/
|
||||
public int getDCount() {
|
||||
public int getdCount() {
|
||||
return dCount;
|
||||
}
|
||||
|
||||
public void setDCount(int dCount) {
|
||||
public void setdCount(int dCount) {
|
||||
this.dCount = dCount;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
*/
|
||||
|
@ -310,7 +310,7 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setAest(int aest) {
|
||||
this.aest = aest;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the dHrAvg
|
||||
*/
|
||||
|
@ -321,7 +321,7 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setKs(float ks) {
|
||||
this.ks = ks;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return The id
|
||||
*/
|
||||
|
@ -333,7 +333,7 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* @return the timeTag
|
||||
*/
|
||||
public Date getRefTime() {
|
||||
|
@ -343,7 +343,7 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setRefTime(Date refTime) {
|
||||
this.refTime = refTime;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the timeTag
|
||||
*/
|
||||
|
@ -354,7 +354,7 @@ public class GeoMagK1min extends PersistableDataObject<Object> {
|
|||
public void setLastUpdate(Date lastUpdate) {
|
||||
this.lastUpdate = lastUpdate;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the stationCode
|
||||
*/
|
||||
|
|
|
@ -6,7 +6,7 @@ import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
|
|||
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
|
||||
|
||||
public class GeoMagPathProvider extends DefaultPathProvider {
|
||||
private static GeoMagPathProvider instance = new GeoMagPathProvider();
|
||||
private static GeoMagPathProvider instance = new GeoMagPathProvider();
|
||||
|
||||
public static GeoMagPathProvider getInstance() {
|
||||
return instance;
|
||||
|
@ -40,15 +40,13 @@ public class GeoMagPathProvider extends DefaultPathProvider {
|
|||
StringBuffer sb = new StringBuffer(64);
|
||||
sb.append(pluginName);
|
||||
|
||||
if (pdo.getDataTime() != null ) {
|
||||
Date time = (Date) pdo.getDataTime().getRefTime().clone();
|
||||
time.setHours(0);
|
||||
|
||||
// //System.out.println("***Path "+pdo.getDataTime().getRefTime() +" "+ fileNameFormat.get().format(pdo.getDataTime().getRefTime()));
|
||||
sb.append(fileNameFormat.get().format(time));
|
||||
sb.append(".h5");
|
||||
if (pdo.getDataTime() != null) {
|
||||
Date time = (Date) pdo.getDataTime().getRefTime().clone();
|
||||
time.setHours(0);
|
||||
|
||||
sb.append(fileNameFormat.get().format(time));
|
||||
sb.append(".h5");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
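The getHDFFileName change above builds the HDF5 file name from the plugin name, the reference time with its hours zeroed, and a ".h5" suffix. A rough Python sketch of that naming follows; the date pattern is an assumption, since the real pattern comes from the inherited fileNameFormat, which is not shown in this diff.

    from datetime import datetime

    def geomag_h5_name(plugin_name, ref_time, date_pattern="-%Y-%m-%d-%H"):
        # Zero the hours, mirroring time.setHours(0) in the Java above.
        # NOTE: date_pattern is a placeholder assumption, not the AWIPS value.
        day_start = ref_time.replace(hour=0)
        return plugin_name + day_start.strftime(date_pattern) + ".h5"

    # geomag_h5_name("geomag", datetime(2014, 7, 7, 15, 30))
    #     -> "geomag-2014-07-07-00.h5"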
|
@ -1,6 +1,5 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag;
|
||||
|
||||
|
||||
import javax.persistence.Access;
|
||||
import javax.persistence.AccessType;
|
||||
import javax.persistence.Column;
|
||||
|
@ -21,8 +20,9 @@ import com.raytheon.uf.common.dataplugin.persist.IHDFFilePathProvider;
|
|||
import com.raytheon.uf.common.dataplugin.persist.PersistablePluginDataObject;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
|
||||
/**
|
||||
* Record implementation for geomag plugin.
|
||||
* Record implementation for geomag plugin.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
|
@ -51,49 +51,84 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public static final String OBS_TIME = "Obs_Time";//h5
|
||||
public static final String Comp_Insert_Time = "Comp_Insert_Time";
|
||||
public static final String component1 = "Component1";
|
||||
public static final String component2 = "Component2";
|
||||
public static final String component3 = "Component3";
|
||||
public static final String OBS_TIME = "Obs_Time";// h5
|
||||
|
||||
public static final String Comp_Insert_Time = "Comp_Insert_Time";
|
||||
|
||||
public static final String component1 = "Component1";
|
||||
|
||||
public static final String component2 = "Component2";
|
||||
|
||||
public static final String component3 = "Component3";
|
||||
|
||||
public static final String component4 = "Component4";
|
||||
|
||||
public static final String H_HR_AVG = "Hr_Avg_H";
|
||||
|
||||
public static final String D_HR_AVG = "Hr_Avg_D";
|
||||
|
||||
|
||||
public static final String K_Index = "P3h_K_Index";
|
||||
|
||||
public static final String K_Real = "P3h_K_Real";
|
||||
|
||||
public static final String K_Gamma = "P3h_Gamma";
|
||||
|
||||
public static final String Kest_Index = "P3h_Kest_Index";
|
||||
|
||||
public static final String Kest_Real = "P3h_Kest_Real";
|
||||
|
||||
public static final String Kest_Gamma = "P3h_Kest_Gamma";
|
||||
|
||||
public static final String KH_Real = "P3h_Hk_Real";
|
||||
|
||||
public static final String KH_Gamma = "P3h_H_Gamma";
|
||||
|
||||
public static final String KD_Real = "P3h_Dk_Real";
|
||||
|
||||
public static final String KD_Gamma = "P3h_D_Gamma";
|
||||
|
||||
public static final String A_Running = "P3h_A_Running";
|
||||
|
||||
public static final String A_Final_Running = "P3h_A_Final_Running";
|
||||
|
||||
public static final String KH_Int = "P3h_Hk_Int";
|
||||
|
||||
public static final String KD_Int = "P3h_Dk_Int";
|
||||
|
||||
public static final String Last_Update = "P3h_Last_Update";
|
||||
|
||||
|
||||
public static final String Kest_Index_1m = "P1m_Kest_Index";
|
||||
|
||||
public static final String Kest_Real_1m = "P1m_Kest_Real";
|
||||
|
||||
public static final String Kest_Gamma_1m = "P1m_Kest_Gamma";
|
||||
|
||||
public static final String KH_Real_1m = "P1m_Hk_Real";
|
||||
|
||||
public static final String KH_Gamma_1m = "P1m_H_Gamma";
|
||||
|
||||
public static final String KH_Index_1m = "P1m_Hk_Index";
|
||||
|
||||
public static final String KD_Real_1m = "P1m_Dk_Real";
|
||||
|
||||
public static final String KD_Gamma_1m = "P1m_D_Gamma";
|
||||
|
||||
public static final String KD_Index_1m = "P1m_Dk_Index";
|
||||
|
||||
public static final String KH_Count = "P1m_H_Count";
|
||||
|
||||
public static final String KD_Count = "P1m_D_Count";
|
||||
|
||||
public static final String KH_Dev = "P1m_H_Dev";
|
||||
|
||||
public static final String KD_Dev = "P1m_D_Dev";
|
||||
|
||||
public static final String A_est = "P1m_A_est";
|
||||
|
||||
public static final String K_s = "P1m_K_s";
|
||||
|
||||
public static final String Last_Update_1m = "P1m_Last_Update";
|
||||
|
||||
|
||||
/**
|
||||
* station code
|
||||
*/
|
||||
|
@ -116,19 +151,11 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
* report type
|
||||
*/
|
||||
@DataURI(position = 3)
|
||||
@Column
|
||||
@Column
|
||||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private String reportType;
|
||||
|
||||
// /**
|
||||
// * index for data in hdf5, 0-1439
|
||||
// */
|
||||
// @Column
|
||||
// @DynamicSerializeElement
|
||||
// @XmlAttribute
|
||||
// private int idx;
|
||||
|
||||
|
||||
/**
|
||||
* flag to indicate bad data point
|
||||
*/
|
||||
|
@ -136,7 +163,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private int badDataPoint;
|
||||
|
||||
|
||||
/**
|
||||
* H or X values
|
||||
*/
|
||||
|
@ -144,7 +171,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private float component_1;
|
||||
|
||||
|
||||
/**
|
||||
* D or Y values
|
||||
*/
|
||||
|
@ -152,7 +179,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private float component_2;
|
||||
|
||||
|
||||
/**
|
||||
* Z values
|
||||
*/
|
||||
|
@ -160,7 +187,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private float component_3;
|
||||
|
||||
|
||||
/**
|
||||
* F values
|
||||
*/
|
||||
|
@ -168,7 +195,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private float component_4;
|
||||
|
||||
|
||||
/**
|
||||
* F values
|
||||
*/
|
||||
|
@ -192,108 +219,37 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private int cadenceId;
|
||||
|
||||
|
||||
/**
|
||||
* Observation Date and Time for the minute values
|
||||
*/
|
||||
@Transient
|
||||
private long[] obs_times;
|
||||
|
||||
|
||||
/**
|
||||
* H or X values
|
||||
*/
|
||||
@Transient
|
||||
private float[] comp1_data;
|
||||
|
||||
|
||||
/**
|
||||
* D or Y values
|
||||
*/
|
||||
@Transient
|
||||
private float[] comp2_data;
|
||||
|
||||
|
||||
/**
|
||||
* Z values
|
||||
*/
|
||||
@Transient
|
||||
private float[] comp3_data;
|
||||
|
||||
|
||||
/**
|
||||
* F values
|
||||
*/
|
||||
@Transient
|
||||
private float[] comp4_data;
|
||||
|
||||
// /**
|
||||
// * Following for HrAvg values
|
||||
// */
|
||||
// @Transient
|
||||
// private float[] hrAvgH;
|
||||
//
|
||||
// @Transient
|
||||
// private float[] hrAvgD;
|
||||
//
|
||||
// @Transient
|
||||
// private long[] comp_InsertTime;
|
||||
//
|
||||
// /**
|
||||
// * Following for K values
|
||||
// */
|
||||
// // 3hr
|
||||
// @Transient
|
||||
// private int[] kK_Index;
|
||||
// @Transient
|
||||
// private float[] kK_Real;
|
||||
// @Transient
|
||||
// private float[] kK_Gamma;
|
||||
// @Transient
|
||||
// private int[] kest_Index;
|
||||
// @Transient
|
||||
// private float[] kest_Real;
|
||||
// @Transient
|
||||
// private float[] kest_Gamma;
|
||||
// @Transient
|
||||
// private float[] hK_Real;
|
||||
// @Transient
|
||||
// private float[] hK_Gamma;
|
||||
// @Transient
|
||||
// private float[] dK_Real;
|
||||
// @Transient
|
||||
// private float[] dK_Gamma;
|
||||
//
|
||||
// // 1 min
|
||||
// @Transient
|
||||
// private int[] h_Count;
|
||||
// @Transient
|
||||
// private int[] d_Count;
|
||||
// @Transient
|
||||
// private int[] kest_Index_1m;
|
||||
// @Transient
|
||||
// private float[] kest_Real_1m;
|
||||
// @Transient
|
||||
// private float[] kest_Gamma_1m;
|
||||
// @Transient
|
||||
// private float[] hK_Real_1m;
|
||||
// @Transient
|
||||
// private float[] hK_Gamma_1m;
|
||||
// @Transient
|
||||
// private float[] dK_Real_1m;
|
||||
// @Transient
|
||||
// private float[] dK_Gamma_1m;
|
||||
// @Transient
|
||||
// private int[] hK_Index_1m;
|
||||
// @Transient
|
||||
// private int[] dK_Index_1m;
|
||||
// @Transient
|
||||
// private float[] h_Dev;
|
||||
// @Transient
|
||||
// private float[] d_Dev;
|
||||
// @Transient
|
||||
// private float[] Ks;
|
||||
// @Transient
|
||||
// private int[] a_est;
|
||||
// @Transient
|
||||
// private float[] update_1m;
|
||||
|
||||
/**
|
||||
* No-arg Constructor
|
||||
*/
|
||||
|
@ -325,7 +281,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setStationCode(String stationCode) {
|
||||
this.stationCode = stationCode;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the sourceId
|
||||
*/
|
||||
|
@ -340,7 +296,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setSourceId(int sourceId) {
|
||||
this.sourceId = sourceId;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the badDataPoint
|
||||
*/
|
||||
|
@ -349,12 +305,13 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param the badDataPoint to set
|
||||
* @param the
|
||||
* badDataPoint to set
|
||||
*/
|
||||
public void setBadDataPoint(int badDataPoint) {
|
||||
this.badDataPoint = badDataPoint;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the reportType
|
||||
*/
|
||||
|
@ -376,10 +333,11 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public long[] getObsTimes() {
|
||||
return obs_times;
|
||||
}
|
||||
|
||||
public void setObsTimes(long[] obs_times) {
|
||||
this.obs_times = obs_times;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the comp1_data array
|
||||
*/
|
||||
|
@ -390,7 +348,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setComp1Data(float[] h_data) {
|
||||
this.comp1_data = h_data;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the comp2_data array
|
||||
*/
|
||||
|
@ -401,7 +359,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setComp2Data(float[] d_data) {
|
||||
this.comp2_data = d_data;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the comp3_data
|
||||
*/
|
||||
|
@ -412,18 +370,18 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setComp3Data(float[] z_data) {
|
||||
this.comp3_data = z_data;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the comp4_data
|
||||
*/
|
||||
public float[] getComp4Data() {
|
||||
return comp4_data;
|
||||
}
|
||||
|
||||
|
||||
public void setComp4Data(float[] f_data) {
|
||||
this.comp4_data = f_data;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the component_1
|
||||
*/
|
||||
|
@ -434,7 +392,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setComponent_1(float component_1) {
|
||||
this.component_1 = component_1;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the component_2
|
||||
*/
|
||||
|
@ -445,7 +403,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setComponent_2(float component_2) {
|
||||
this.component_2 = component_2;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the component_3
|
||||
*/
|
||||
|
@ -456,7 +414,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setComponent_3(float component_3) {
|
||||
this.component_3 = component_3;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the component_4
|
||||
*/
|
||||
|
@ -481,7 +439,7 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public void setDistributionId(int distributionId) {
|
||||
this.distributionId = distributionId;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @return the d_HrAvg
|
||||
*/
|
||||
|
@ -499,257 +457,16 @@ public class GeoMagRecord extends PersistablePluginDataObject {
|
|||
public int getCadenceId() {
|
||||
return cadenceId;
|
||||
}
|
||||
|
||||
|
||||
public void setCadenceId(int cadenceId) {
|
||||
this.cadenceId = cadenceId;
|
||||
}
|
||||
|
||||
// /*
|
||||
// * @param k index related
|
||||
// */
|
||||
// // 3hr
|
||||
// public int[] getKKIndex() {
|
||||
// return kK_Index;
|
||||
// }
|
||||
//
|
||||
// public void setKKIndex(int[] kK_Index) {
|
||||
// this.kK_Index = kK_Index;
|
||||
// }
|
||||
//
|
||||
// public float[] getKKReal() {
|
||||
// return kK_Real;
|
||||
// }
|
||||
//
|
||||
// public void setKKReal(float[] kK_Real) {
|
||||
// this.kK_Real = kK_Real;
|
||||
// }
|
||||
//
|
||||
// public float[] getKKGamma() {
|
||||
// return kK_Gamma;
|
||||
// }
|
||||
//
|
||||
// public void setKKGamma(float[] kK_Gamma) {
|
||||
// this.kK_Gamma = kK_Gamma;
|
||||
// }
|
||||
//
|
||||
//
|
||||
// public float[] getHKReal() {
|
||||
// return hK_Real;
|
||||
// }
|
||||
//
|
||||
// public void setHKReal(float[] hK_Real) {
|
||||
// this.hK_Real = hK_Real;
|
||||
// }
|
||||
//
|
||||
// public float[] getDKReal() {
|
||||
// return dK_Real;
|
||||
// }
|
||||
//
|
||||
// public void setDKReal(float[] dK_Real) {
|
||||
// this.dK_Real = dK_Real;
|
||||
// }
|
||||
//
|
||||
// public int[] getKestIndex() {
|
||||
// return kest_Index;
|
||||
// }
|
||||
//
|
||||
// public void setKestIndex(int[] kest_Index) {
|
||||
// this.kest_Index = kest_Index;
|
||||
// }
|
||||
//
|
||||
// public float[] getKestReal() {
|
||||
// return kest_Real;
|
||||
// }
|
||||
//
|
||||
// public void setKestReal(float[] kest_Real) {
|
||||
    // this.kest_Real = kest_Real;
    // }
    //
    // public float[] getKestGamma() {
    // return kest_Gamma;
    // }
    //
    // public void setKestGamma(float[] kest_Gamma) {
    // this.kest_Gamma = kest_Gamma;
    // }
    //
    // public float[] getHKGamma() {
    // return hK_Gamma;
    // }
    //
    // public void setHKGamma(float[] hK_Gamma) {
    // this.hK_Gamma = hK_Gamma;
    // }
    //
    // public float[] getDKGamma() {
    // return dK_Gamma;
    // }
    //
    // public void setDKGamma(float[] dK_Gamma) {
    // this.dK_Gamma = dK_Gamma;
    // }
    //
    // // 1min
    // public float[] getHKReal1m() {
    // return hK_Real_1m;
    // }
    //
    // public void setHKReal1m(float[] hK_Real_1m) {
    // this.hK_Real_1m = hK_Real_1m;
    // }
    //
    // public float[] getDKReal1m() {
    // return dK_Real_1m;
    // }
    //
    // public void setDKReal1m(float[] dK_Real_1m) {
    // this.dK_Real_1m = dK_Real_1m;
    // }
    //
    // public int[] getKestIndex1m() {
    // return kest_Index_1m;
    // }
    //
    // public void setKestIndex1m(int[] kest_Index_1m) {
    // this.kest_Index_1m = kest_Index_1m;
    // }
    //
    // public float[] getKestReal1m() {
    // return kest_Real_1m;
    // }
    //
    // public void setKestReal1m(float[] kest_Real_1m) {
    // this.kest_Real_1m = kest_Real_1m;
    // }
    //
    // public float[] getKestGamma1m() {
    // return kest_Gamma_1m;
    // }
    //
    // public void setKestGamma1m(float[] kest_Gamma_1m) {
    // this.kest_Gamma_1m = kest_Gamma_1m;
    // }
    //
    // public float[] getHKGamma1m() {
    // return hK_Gamma_1m;
    // }
    //
    // public void setHKGamma1m(float[] hK_Gamma_1m) {
    // this.hK_Gamma_1m = hK_Gamma_1m;
    // }
    //
    // public float[] getDKGamma1m() {
    // return dK_Gamma_1m;
    // }
    //
    // public void setDKGamma1m(float[] dK_Gamma_1m) {
    // this.dK_Gamma_1m = dK_Gamma_1m;
    // }
    //
    // public int[] getHKIndex1m() {
    // return hK_Index_1m;
    // }
    //
    // public void setHKIndex1m(int[] hK_Index_1m) {
    // this.hK_Index_1m = hK_Index_1m;
    // }
    //
    // public int[] getDKIndex1m() {
    // return dK_Index_1m;
    // }
    //
    // public void setDKIndex1m(int[] dK_Index_1m) {
    // this.dK_Index_1m = dK_Index_1m;
    // }
    //
    // public int[] getHCount() {
    // return h_Count;
    // }
    //
    // public void setHCount(int[] h_Count) {
    // this.h_Count = h_Count;
    // }
    //
    // public int[] getDCount() {
    // return d_Count;
    // }
    //
    // public void setDCount(int[] d_Count) {
    // this.d_Count = d_Count;
    // }
    //
    // public float[] getHDev() {
    // return h_Dev;
    // }
    //
    // public void setHDev(float[] h_Dev) {
    // this.h_Dev = h_Dev;
    // }
    //
    // public float[] getDDev() {
    // return d_Dev;
    // }
    //
    // public void setDDev(float[] d_Dev) {
    // this.d_Dev = d_Dev;
    // }
    //
    // public float[] getKs() {
    // return Ks;
    // }
    //
    // public void setKs(float[] Ks) {
    // this.Ks = Ks;
    // }
    //
    // public int[] getAest() {
    // return a_est;
    // }
    //
    // public void setAest(int[] a_est) {
    // this.a_est = a_est;
    // }
    //
    // public float[] getLastUpdate() {
    // return update_1m;
    // }
    //
    // public void setLastUpdate(float[] update_1m) {
    // this.update_1m = update_1m;
    // }

    @Override
    public IDecoderGettable getDecoderGettable() {
        return null;
    }

    // public void retrieveFromDataStore(IDataStore dataStore) {
    //
    // try {
    // IDataRecord[] dataRec = dataStore.retrieve(getDataURI());
    // for (int i = 0; i < dataRec.length; i++) {
    // if (dataRec[i].getName().equals(GeoMagRecord.OBS_TIME)) {
    // obs_times = (((LongDataRecord) dataRec[i]).getLongData());
    // }
    // if (dataRec[i].getName().equals(GeoMagRecord.component1)) {
    // comp1_data = (((FloatDataRecord) dataRec[i]).getFloatData());
    // }
    // if (dataRec[i].getName().equals(GeoMagRecord.component2)) {
    // comp2_data = (((FloatDataRecord) dataRec[i]).getFloatData());
    // }
    // if (dataRec[i].getName().equals(GeoMagRecord.component3)) {
    // comp3_data = (((FloatDataRecord) dataRec[i]).getFloatData());
    // }
    // if (dataRec[i].getName().equals(GeoMagRecord.component4)) {
    // comp4_data = (((FloatDataRecord) dataRec[i]).getFloatData());
    // }
    // }
    //
    // } catch (Exception se) {
    // se.printStackTrace();
    // }
    // }

    @Override
    @Column
    @Access(AccessType.PROPERTY)
@ -0,0 +1,486 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.calculation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/*
|
||||
* The calculation of k, 1 minute related.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ----------- ---------- ---------- --------------------------
|
||||
* 05/14/2013 #989 qzhou Initial Creation
|
||||
* 03/18/2014 #1123 qzhou Add getHdevOrDDev
|
||||
* 04/09/2014 #1123 qzhou Modified getKIndex for gamma value
|
||||
* 06/23/2014 R4152 qzhou Fixed on getQHAQDC formula
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1
|
||||
*/
|
||||
public class CalcEach1min {
|
||||
private static final float MISSING_VAL = 99999.99f;
|
||||
|
||||
private static final int MAX_GAP_LENGTH = 15;
|
||||
|
||||
private static final int SMOOTH_WINDOW = 60;
|
||||
|
||||
private static final int TRANSITION_TIME = 60;
|
||||
|
||||
private static final int PHASE_POWER = 3;
|
||||
|
||||
private static final int HARM_ORDER = 5;
|
||||
|
||||
private static int HOURS = 24;
|
||||
|
||||
private static int MINUTES = 60;
|
||||
|
||||
/*
|
||||
* @param dataIn -- data of 4320
|
||||
*/
|
||||
public static float[] fillGaps(float[] dataIn) {
|
||||
float[] data = dataIn.clone();
|
||||
int i = 0;
|
||||
int size = data.length;
|
||||
|
||||
while (i < size) {
|
||||
|
||||
// Find the next missing value
|
||||
int flag = 0; // flag used for break
|
||||
while (i < size && flag == 0) {
|
||||
if (data[i] == MISSING_VAL)
|
||||
flag = 1;
|
||||
else
|
||||
i++;
|
||||
}
|
||||
|
||||
// If a gap was found handle it
|
||||
if (i < size) {
|
||||
int gapIndex = i; // index of first missing value
|
||||
|
||||
// Find the last missing point
|
||||
flag = 0;
|
||||
while (i < size && flag == 0) {
|
||||
if (data[i] != MISSING_VAL)
|
||||
flag = 1;
|
||||
else
|
||||
i++;
|
||||
}
|
||||
|
||||
// Interpolate the gap if possible. We cannot extrapolate
|
||||
if ((gapIndex > 0) && (i < size)) {
|
||||
// Now i is the index of first non-missing value
|
||||
// and GapIndex is the index of first missing value
|
||||
int gapLength = i - gapIndex; // i is index of first
|
||||
// non-missing value
|
||||
|
||||
// Interpolate if the gap is small enough
|
||||
if (gapLength < MAX_GAP_LENGTH) {
|
||||
float value1 = data[gapIndex - 1];
|
||||
float value2 = data[i];
|
||||
for (int j = 1; j < gapLength + 1; j++)
|
||||
data[gapIndex++] = value1 + (j * (value2 - value1))
|
||||
/ (gapLength + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
/*
|
||||
* 24 element floating point array. (DefLength + 30 + kLength) Find out how
|
||||
* many points are used to get the centered hour average
|
||||
*/
|
||||
public static float[] getFitLength(float[] defLength, float[] kIndex,
|
||||
float[] kLength) {
|
||||
float[] fitLength = new float[HOURS];
|
||||
int[] ind = new int[HOURS];
|
||||
float[] curK = new float[HOURS];
|
||||
|
||||
for (int i = 0; i < HOURS; i++) {
|
||||
fitLength[i] = 30.0f + defLength[i];
|
||||
ind[i] = (int) Math.floor(i / 3.0f);
|
||||
curK[i] = kIndex[ind[i]];
|
||||
|
||||
if (curK[i] != MISSING_VAL)
|
||||
fitLength[i] += kLength[(int) curK[i]];
|
||||
|
||||
if (fitLength[i] > 1440.0f)
|
||||
fitLength[i] = 1440.0f;
|
||||
}
|
||||
|
||||
return fitLength;
|
||||
}
|
||||
|
||||
/*
|
||||
* @param data (hhdata, dddata), float[4320]
|
||||
*
|
||||
* @return -- 24 element floating point array. Calculate averages centered
|
||||
* on each hour of the day
|
||||
*/
|
||||
public static float[] getCentHourAvg(float[] data, float[] fitLength,
|
||||
float[] defLength) {
|
||||
|
||||
float[] HrAvg = new float[HOURS]; // double
|
||||
Arrays.fill(HrAvg, MISSING_VAL);
|
||||
|
||||
for (int ihr = 0; ihr < HOURS; ihr++) {
|
||||
// take middle interval
|
||||
int center = 1440 + ihr * MINUTES + 30;
|
||||
int start = center - Math.round(fitLength[ihr]);
|
||||
int end = center + Math.round(fitLength[ihr]);
|
||||
int missing = 0;
|
||||
double sum = 0;
|
||||
|
||||
// if data[i] have no missing value
|
||||
for (int i = start; i < end + 1; i++) {
|
||||
|
||||
if (data[i] != MISSING_VAL) {
|
||||
sum += data[i];
|
||||
} else {
|
||||
missing++;
|
||||
break;// this loop
|
||||
}
|
||||
}
|
||||
|
||||
if (missing == 0) // no missing value
|
||||
HrAvg[ihr] = (float) sum / (end - start + 1);
|
||||
}
|
||||
|
||||
// if HrAvg have missing value
|
||||
// Extrapolate the first missing points--missing beginning
|
||||
int hr0 = 0;
|
||||
int flag = 0;
|
||||
while (hr0 < HOURS && flag == 0) {
|
||||
if (HrAvg[hr0] != MISSING_VAL)
|
||||
flag = 1;
|
||||
else {
|
||||
hr0++;
|
||||
|
||||
}
|
||||
}
|
||||
if (hr0 > 0 && hr0 < HOURS)
|
||||
for (int i = 0; i < hr0; i++)
|
||||
HrAvg[i] = HrAvg[hr0];
|
||||
|
||||
// Extrapolate the last missing points--missing end
|
||||
int hr1 = 23;
|
||||
while ((hr1 > hr0) && (HrAvg[hr1] == MISSING_VAL))
|
||||
hr1--;
|
||||
if (hr1 < 23)
|
||||
for (int i = hr1 + 1; i < HOURS; i++)
|
||||
HrAvg[i] = HrAvg[hr1];
|
||||
|
||||
// Interpolate the missing points between hour0 and hour1
|
||||
// Both hour0 and hour1 are hours where data exists
|
||||
while (hr0 < hr1) {
|
||||
|
||||
do {
|
||||
hr0++;
|
||||
} while (hr0 < hr1 && HrAvg[hr0] != MISSING_VAL);
|
||||
|
||||
if (hr0 < hr1) {
|
||||
int hr = hr0; // first missing hour
|
||||
while ((hr0 < hr1) && (HrAvg[hr0] == MISSING_VAL))
|
||||
hr0++;
|
||||
int gapLength = hr0 - hr;
|
||||
float value1 = HrAvg[hr - 1];// not missing
|
||||
float value2 = HrAvg[hr0]; // not missing
|
||||
|
||||
for (int i = 1; i < gapLength + 1; i++)
|
||||
HrAvg[hr++] = value1 + (i * (value2 - value1))
|
||||
/ (gapLength + 1);
|
||||
}
|
||||
}
|
||||
|
||||
return HrAvg;
|
||||
}
|
||||
|
||||
/*
|
||||
* @param hrAvg -- QHA data 1440
|
||||
*/
|
||||
public static float[] getHarmonicFit(float[] hrCentAvg) {
|
||||
float[] fitCurve = new float[1440];
|
||||
int delta = MINUTES; // minutes between points in HrAvg
|
||||
int t0 = 30; // time tag for first point in HrAvg
|
||||
float t1 = (HOURS - 1) * delta + t0; // time tag for last point in HrAvg
|
||||
|
||||
// Rotate HrAvg so that 1st and last points are equal, store in HA
|
||||
float r_coeff = (hrCentAvg[HOURS - 1] - hrCentAvg[0]) / (t1 - t0);
|
||||
float[] hrA = new float[HOURS];// 0.0*HrAvg
|
||||
for (int i = 0; i < HOURS; i++)
|
||||
hrA[i] = hrCentAvg[i] - r_coeff * (i * delta);
|
||||
|
||||
// Calculate first Fourier series coefficients up to Horder
|
||||
float[] reA = new float[HARM_ORDER + 1]; // real part of the Fourier
|
||||
// Series Coefficients
|
||||
// (initially 0)
|
||||
float[] imA = new float[HARM_ORDER + 1]; // imaginary part of Fourier
|
||||
// Series Coefficients
|
||||
// (initially 0)
|
||||
for (int i = 0; i < HARM_ORDER + 1; i++) {
|
||||
for (int j = 0; j < HOURS; j++) {
|
||||
reA[i] += hrA[j] * Math.cos(2 * (Math.PI) * j * i / HOURS);
|
||||
imA[i] -= hrA[j] * Math.sin(2 * (Math.PI) * j * i / HOURS);
|
||||
}
|
||||
}
|
||||
|
||||
// Derive FitCurve as harmonic fit using inverse transform
|
||||
for (int t = 0; t < HOURS * delta; t++) { // t is minute of the day
|
||||
float theta = (float) (2 * (Math.PI) * (t - t0) / (HOURS * delta));
|
||||
fitCurve[t] = reA[0] / HOURS;
|
||||
for (int i = 1; i < HARM_ORDER + 1; i++)
|
||||
fitCurve[t] += (2 * reA[i] * Math.cos(i * theta) - 2 * imA[i]
|
||||
* Math.sin(i * theta))
|
||||
/ HOURS;
|
||||
|
||||
// Derotate FitCurve by same amount as HrAvg
|
||||
fitCurve[t] += r_coeff * (t - t0);
|
||||
}
|
||||
|
||||
return fitCurve;
|
||||
}
|
||||
|
||||
/*
|
||||
* @param hdev,ddev -- float[1440]
|
||||
*/
|
||||
public static List getKIndex(float[] hdev, float[] ddev, int[] kLimit,
|
||||
int missingFlag) {
|
||||
List<float[]> list = new ArrayList<float[]>();
|
||||
|
||||
// Initialize the return data with MissingValue
|
||||
float[] kIndex = new float[8];
|
||||
float[] hk = new float[8];
|
||||
float[] dk = new float[8];
|
||||
float[] gamma = new float[8];
|
||||
float[] hGamma = new float[8];
|
||||
float[] dGamma = new float[8];
|
||||
|
||||
Arrays.fill(kIndex, MISSING_VAL);
|
||||
Arrays.fill(hk, MISSING_VAL);
|
||||
Arrays.fill(dk, MISSING_VAL);
|
||||
Arrays.fill(gamma, MISSING_VAL);
|
||||
Arrays.fill(hGamma, MISSING_VAL);
|
||||
Arrays.fill(dGamma, MISSING_VAL);
|
||||
|
||||
// Check for bad input data
|
||||
int npts = hdev.length;
|
||||
if (npts != ddev.length)
|
||||
return list;
|
||||
|
||||
if (npts < 1261 || npts > 1440) // 21*60+1
|
||||
return list;
|
||||
|
||||
// Step through each three hourly interval
|
||||
for (int ipd = 0; ipd < 8; ipd++) {
|
||||
int istart = ipd * 180;
|
||||
int iend = istart + 180 - 1;
|
||||
if (iend >= npts)
|
||||
iend = npts - 1; // allow for partial interval on the end
|
||||
if (iend < istart)
|
||||
continue; // should never happen...
|
||||
|
||||
// Check for missing data
|
||||
int i = 0;
|
||||
int ii = 0;
|
||||
int npdpts = iend - istart + 1; // number of possible points in the
|
||||
// period, =180
|
||||
float[] hhdev = new float[npdpts];
|
||||
float[] dddev = new float[npdpts];
|
||||
|
||||
for (int j = istart; j < iend + 1; j++) {
|
||||
hhdev[j - istart] = hdev[j];
|
||||
dddev[j - istart] = ddev[j];
|
||||
}
|
||||
|
||||
// get hdevGood
|
||||
for (i = npdpts - 1; i >= 0; i--)
|
||||
if (hhdev[i] != MISSING_VAL && hhdev[i] != 0)
|
||||
break;
|
||||
|
||||
for (ii = npdpts - 1; ii >= 0; ii--)
|
||||
if (dddev[ii] != MISSING_VAL && dddev[ii] != 0)
|
||||
break;
|
||||
|
||||
// i, ii are the last data that is not missing
|
||||
float[] hdevGood = new float[i + 1];
|
||||
float[] ddevGood = new float[ii + 1];
|
||||
if (i > -1)
|
||||
for (int j = 0; j < i + 1; j++)
|
||||
hdevGood[j] = hhdev[j];
|
||||
if (ii > -1)
|
||||
for (int j = 0; j < ii + 1; j++)
|
||||
ddevGood[j] = dddev[j];
|
||||
|
||||
if (missingFlag == 0 || (i > -1 && ii > -1)) {
|
||||
if (hdevGood != null && hdevGood.length != 0)
|
||||
hGamma[ipd] = CalcUtil.maxValue(hdevGood)
|
||||
- CalcUtil.minValue(hdevGood);
|
||||
if (ddevGood != null && ddevGood.length != 0)
|
||||
dGamma[ipd] = CalcUtil.maxValue(ddevGood)
|
||||
- CalcUtil.minValue(ddevGood);
|
||||
|
||||
if (hGamma[ipd] != MISSING_VAL)
|
||||
hk[ipd] = CalcUtil.getKfromTable(kLimit, hGamma[ipd]);
|
||||
|
||||
if (dGamma[ipd] != MISSING_VAL)
|
||||
dk[ipd] = CalcUtil.getKfromTable(kLimit, dGamma[ipd]);
|
||||
|
||||
// get bigger one
|
||||
if (hGamma[ipd] >= dGamma[ipd] && hGamma[ipd] != MISSING_VAL) {
|
||||
kIndex[ipd] = hk[ipd];
|
||||
gamma[ipd] = hGamma[ipd];
|
||||
} else if (dGamma[ipd] >= hGamma[ipd]
|
||||
&& dGamma[ipd] != MISSING_VAL) {
|
||||
kIndex[ipd] = dk[ipd];
|
||||
gamma[ipd] = dGamma[ipd];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
list.add(0, kIndex);
|
||||
list.add(1, gamma);
|
||||
list.add(2, hk);
|
||||
list.add(3, hGamma);
|
||||
list.add(4, dk);
|
||||
list.add(5, dGamma);
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
    /*
     * Force QHAQDC and QDAQDC to be continuous between the last and the first
     * value using a +/- SMOOTH_WINDOW
     */
    public static float[] getQHAQDC(float[] qdc) {
        float[] data = qdc.clone(); // new float[1440];

        if (qdc.length != 1440)
            return data;

        float jump = qdc[0] - qdc[1439];

        for (int i = 0; i < SMOOTH_WINDOW; i++) {

            data[1440 - SMOOTH_WINDOW + i] += ((float) i / (SMOOTH_WINDOW - 1))
                    * 0.5f * jump;
            data[i] -= (1.0f - (float) i / (SMOOTH_WINDOW - 1)) * 0.5f * jump;

        }

        return data;
    }
|
||||
|
||||
/*
|
||||
* find index in hhdata that indicates current time currTimeIndex = first
|
||||
* 1440 minutes + prev day minutes + curr day minutes
|
||||
*/
|
||||
public static int getCurrTimeIndex(int hour, int min, int epHour) {
|
||||
|
||||
if (epHour == 0)
|
||||
epHour = 24;
|
||||
|
||||
int currTimeIndex = HOURS * MINUTES + (HOURS - epHour) * MINUTES + hour
|
||||
* MINUTES + min;
|
||||
|
||||
return currTimeIndex;
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
*/
|
||||
public static float[] getExtrapolation(float[] dataIn, float[] qhaQdc,
|
||||
int currTimeIndex) { // 4320
|
||||
float[] data = dataIn.clone();
|
||||
int j0 = currTimeIndex;// Last good H or D index
|
||||
|
||||
if (data.length != 4320 || qhaQdc.length != 1440)
|
||||
return data;
|
||||
|
||||
if (data[j0] != MISSING_VAL) {
|
||||
for (int j = j0 + 1; j < 4320; j++) {
|
||||
int w2 = j - j0 - 1; // from .pro
|
||||
int w1 = TRANSITION_TIME - w2;
|
||||
|
||||
if (w1 < 0)
|
||||
w1 = 0;
|
||||
|
||||
data[j] = (w1 * data[j0] + w2 * qhaQdc[j % 1440]) / (w1 + w2);
|
||||
}
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
public static float[] getDev(float[] data, float[] qdc) {
|
||||
float[] dev = new float[1440];
|
||||
|
||||
if (data.length != 4320 || qdc.length != 1440)
|
||||
return dev;
|
||||
|
||||
for (int i = 0; i < 1440; i++) {
|
||||
|
||||
if (data[i + 1440] != MISSING_VAL && qdc[i] != MISSING_VAL)
|
||||
dev[i] = data[i + 1440] - qdc[i];
|
||||
else
|
||||
dev[i] = MISSING_VAL;
|
||||
}
|
||||
|
||||
return dev;
|
||||
}
|
||||
|
||||
public static float[] adjustHrCentAvg(float[] hcAIn, float[] qha,
|
||||
float[] gamma, int[] kLimit) {
|
||||
float[] hcA = hcAIn.clone();
|
||||
float wh = 0;
|
||||
|
||||
if (hcA.length != HOURS || gamma.length != 8)
|
||||
return hcA;
|
||||
|
||||
for (int ipd = 0; ipd < 8; ipd++) {
|
||||
if (gamma[ipd] < kLimit[4])
|
||||
wh = 1;
|
||||
else if (gamma[ipd] >= kLimit[4] && gamma[ipd] < kLimit[6])
|
||||
wh = (float) Math.pow(
|
||||
((kLimit[6] - gamma[ipd]) / (kLimit[6] - kLimit[4])),
|
||||
PHASE_POWER);
|
||||
else
|
||||
wh = 0;
|
||||
|
||||
for (int j = 0; j < 3; j++) {
|
||||
hcA[ipd * 3 + j] = wh * hcA[ipd * 3 + j] + (1 - wh)
|
||||
* qha[ipd * 3 + j];// ?
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return hcA;
|
||||
}
|
||||
|
||||
    /*
     * Wrapper function for a few functions in this class.
     *
     * @param -- hdata, H or D data
     *
     * @param -- hQdc, H or D quiet day curve. float[1440]
     *
     * @param -- currTimeIndex, current time index in the array
     *
     * @return -- hDev or dDev. float[1440]
     */
|
||||
public static float[] getHdevOrDDev(float[] hdata, float[] hQdc,
|
||||
int currTimeIndex) {
|
||||
float[] hDev = null;
|
||||
|
||||
float[] hhdata = CalcEach1min.fillGaps(hdata);
|
||||
|
||||
hDev = CalcEach1min.getDev(hhdata, hQdc);// [1440]
|
||||
|
||||
return hDev;
|
||||
}
|
||||
}
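For reference only (not part of this commit), a minimal standalone sketch of the linear interpolation that fillGaps applies to a run of missing samples shorter than MAX_GAP_LENGTH; the array values and gap position below are made up for illustration:

public class FillGapsSketch {
    private static final float MISSING_VAL = 99999.99f;

    public static void main(String[] args) {
        // one interior gap of length 3 between the samples 10.0 and 18.0
        float[] data = { 10.0f, MISSING_VAL, MISSING_VAL, MISSING_VAL, 18.0f };
        int gapIndex = 1; // index of the first missing value
        int end = 4; // index of the first non-missing value after the gap
        int gapLength = end - gapIndex;
        float value1 = data[gapIndex - 1];
        float value2 = data[end];
        // same formula as fillGaps: evenly spaced steps between the two bounds
        for (int j = 1; j < gapLength + 1; j++)
            data[gapIndex++] = value1 + (j * (value2 - value1))
                    / (gapLength + 1);

        // prints: 10.0 12.0 14.0 16.0 18.0
        for (float v : data)
            System.out.print(v + " ");
        System.out.println();
    }
}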
|
|
@ -0,0 +1,393 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.calculation;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/*
|
||||
* The calculation of k, 3 hour related.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ----------- ---------- ---------- --------------------------
|
||||
* 05/14/2013 #989 qzhou Initial Creation
|
||||
* 03/18/2014 #1123 qzhou Add getHQdcOrDQdc
|
||||
* 06/23/2014 R4152 qzhou Touched up functions that do not affect the results
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1
|
||||
*/
|
||||
public class CalcEach3hr {
|
||||
private static final float MISSING_VAL = 99999.99f;
|
||||
|
||||
private static final int NIGHT_LENGTH = 90; // min
|
||||
|
||||
private static final int DAWN_LENGTH = 60;
|
||||
|
||||
private static final int DAY_LENGTH = 0;
|
||||
|
||||
private static final int DUSK_LENGTH = 60;
|
||||
|
||||
private static int DAYS = 30;
|
||||
|
||||
private static int HOURS = 24;
|
||||
|
||||
private static int MINUTES = 60;
|
||||
|
||||
/*
|
||||
* calculate hrAvgs for this hour
|
||||
*
|
||||
* @param bestList -- contains 1 hour data
|
||||
*/
|
||||
public static float[] getSimpleHourAvg(List bestList) {
|
||||
float[] simpHrAvg = new float[2];
|
||||
float simpHrAvg1 = 0;
|
||||
float simpHrAvg2 = 0;
|
||||
double sum1 = 0;
|
||||
double sum2 = 0;
|
||||
int rec1 = 0;
|
||||
int rec2 = 0;
|
||||
|
||||
for (int i = 0; i < bestList.size(); i++) {
|
||||
|
||||
List<Float> list = (List<Float>) bestList.get(i);
|
||||
|
||||
float comp1 = (Float) list.get(1);
|
||||
float comp2 = (Float) list.get(2);
|
||||
|
||||
if (comp1 != MISSING_VAL) {
|
||||
sum1 += comp1;
|
||||
rec1++;
|
||||
}
|
||||
if (comp2 != MISSING_VAL) {
|
||||
sum2 += comp2;
|
||||
rec2++;
|
||||
}
|
||||
}
|
||||
|
||||
if (rec1 > 30) // less than half missing value
|
||||
simpHrAvg1 = (float) sum1 / rec1;
|
||||
else
|
||||
simpHrAvg1 = MISSING_VAL;
|
||||
|
||||
if (rec2 > 30) // less than half missing value
|
||||
simpHrAvg2 = (float) sum2 / rec2;
|
||||
else
|
||||
simpHrAvg2 = MISSING_VAL;
|
||||
|
||||
simpHrAvg[0] = simpHrAvg1;
|
||||
simpHrAvg[1] = simpHrAvg2;
|
||||
|
||||
return simpHrAvg;
|
||||
}
|
||||
|
||||
/*
|
||||
* calculate hrAvgs for this day.
|
||||
*
|
||||
* @param data -- data of one day, 1440
|
||||
*/
|
||||
public static float[] getSimpleHourAvg(float[] data) { // data 1440
|
||||
|
||||
float[] simpHrAvg = new float[HOURS];
|
||||
|
||||
for (int ihr = 0; ihr < HOURS; ihr++) {
|
||||
double sum = 0;
|
||||
int missing = 0;
|
||||
|
||||
for (int i = ihr * MINUTES; i < ihr * MINUTES + MINUTES; i++) {
|
||||
|
||||
if (data[i] != MISSING_VAL)
|
||||
sum += data[i];
|
||||
else
|
||||
missing++;
|
||||
}
|
||||
|
||||
if (missing < 30) // less than half missing value
|
||||
simpHrAvg[ihr] = (float) sum / (MINUTES - missing);
|
||||
else
|
||||
simpHrAvg[ihr] = MISSING_VAL;
|
||||
}
|
||||
|
||||
return simpHrAvg;
|
||||
}
|
||||
|
||||
/*
|
||||
* calculate hrAvgs for this hour in data array
|
||||
*
|
||||
* @param data -- data of one day, 1440
|
||||
*/
|
||||
public static float getSimpleHourAvg(float[] data, int hour) { // one day
|
||||
// 1440, avg
|
||||
// for hour-1
|
||||
|
||||
float simpHrAvg = 0;
|
||||
double sum = 0;
|
||||
int rec = 0;
|
||||
|
||||
if (data.length <= hour * MINUTES + MINUTES)
|
||||
for (int i = hour * MINUTES; i < data.length; i++) {
|
||||
if (data[i] != MISSING_VAL) {
|
||||
sum += data[i];
|
||||
rec++;
|
||||
}
|
||||
}
|
||||
else
|
||||
for (int i = hour * MINUTES; i < hour * MINUTES + MINUTES; i++) {
|
||||
if (data[i] != MISSING_VAL) {
|
||||
sum += data[i];
|
||||
rec++;
|
||||
}
|
||||
}
|
||||
|
||||
if (rec > 30) // less than half missing value
|
||||
simpHrAvg = (float) sum / (rec);
|
||||
else
|
||||
simpHrAvg = MISSING_VAL;
|
||||
|
||||
return simpHrAvg;
|
||||
}
|
||||
|
||||
/*
|
||||
* @param simpHrAvgH -- data of 30 intervals(720 hours)
|
||||
*
|
||||
* @return disturbance levels for 30 intervals
|
||||
*/
|
||||
public static float[] getDisturbanceLevel(float[] simpHrAvgH,
|
||||
float[] simpHrAvgD) {
|
||||
float[] dB = new float[30];
|
||||
|
||||
for (int j = 0; j < DAYS; j++) {
|
||||
double sum = 0;
|
||||
int count = 0;
|
||||
|
||||
for (int i = 0; i < 23; i++) {
|
||||
int ii = j * HOURS + i;
|
||||
|
||||
if (simpHrAvgH[ii] != MISSING_VAL
|
||||
&& simpHrAvgD[ii] != MISSING_VAL
|
||||
&& simpHrAvgH[ii + 1] != MISSING_VAL
|
||||
&& simpHrAvgD[ii + 1] != MISSING_VAL) {
|
||||
sum += Math
|
||||
.sqrt(Math.pow(
|
||||
(simpHrAvgH[ii + 1] - simpHrAvgH[ii]), 2)
|
||||
+ Math.pow(
|
||||
(simpHrAvgD[ii + 1] - simpHrAvgD[ii]),
|
||||
2));
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
            if (count >= 12) // at least half (12) of the hour-to-hour pairs are valid
|
||||
dB[j] = (float) sum / count;
|
||||
else
|
||||
dB[j] = MISSING_VAL;
|
||||
|
||||
}
|
||||
|
||||
return dB;
|
||||
}
|
||||
|
||||
    /*
     * @param dB -- float[30]
     *
     * @return -- 5 smallest disturbance levels
     */
|
||||
public static Map getSmallDisturbanceLevel(float[] dB) {
|
||||
// create a map that key=dBIndex and value=dBValue.
|
||||
// create a duplicate array dBDup. Sort it.
|
||||
// take 5 smallest dBDup[i]. Then find its index and value from the dB.
|
||||
// Put them to the map
|
||||
Map<Integer, Float> dBSmall = new HashMap<Integer, Float>();
|
||||
|
||||
float[] dBDup = new float[dB.length];
|
||||
for (int i = 0; i < dBDup.length; i++) {
|
||||
dBDup[i] = dB[i];
|
||||
}
|
||||
|
||||
Arrays.sort(dBDup);
|
||||
|
||||
float dupIndex = (int) MISSING_VAL;
|
||||
float wk = 0;
|
||||
// take 5 smallest dBDup
|
||||
for (int j = 0; j < 5; j++) {
|
||||
for (int i = 0; i < dB.length; i++) {
|
||||
if (dB[i] == dBDup[j] && i != dupIndex) { // for duplicated
|
||||
// values
|
||||
|
||||
dBSmall.put(i, dB[i]);
|
||||
dupIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return dBSmall;
|
||||
}
|
||||
|
||||
    /*
     * @param -- dBSmall, map of the 5 smallest disturbance levels
     *
     * @param -- simpHrAvg -- float[720]
     *
     * @return -- quietLevelHourAvg, float[24]
     */
|
||||
public static float[] getQuietLevelHourAvg(Map<Integer, Float> dBSmall,
|
||||
float[] simpHrAvg) {
|
||||
if (dBSmall.entrySet().size() < 5)
|
||||
return simpHrAvg;
|
||||
|
||||
float[] quietHrAvg = new float[24];
|
||||
Arrays.fill(quietHrAvg, MISSING_VAL);
|
||||
int[] index = new int[5];
|
||||
float[] dB = new float[5];
|
||||
|
||||
int k = 0;
|
||||
Iterator<?> iter = dBSmall.entrySet().iterator();
|
||||
while (iter.hasNext()) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Map.Entry<Integer, Float> mEntry = (Map.Entry<Integer, Float>) iter
|
||||
.next(); // sorted on key
|
||||
|
||||
index[k] = mEntry.getKey();
|
||||
dB[k] = mEntry.getValue();
|
||||
|
||||
k++;
|
||||
}
|
||||
|
||||
// construct smallHrAvg array (24*5) from simpHrAvg (24*30)
|
||||
float[] smallHrAvg = new float[24 * 5];
|
||||
|
||||
for (int j = 0; j < 5; j++) { // k=5
|
||||
int endOfArray = smallHrAvg.length;
|
||||
int endTime = (endOfArray > j * HOURS + HOURS) ? j * HOURS + HOURS
|
||||
: endOfArray;
|
||||
|
||||
for (int i = j * HOURS; i < endTime; i++) {
|
||||
smallHrAvg[i] = simpHrAvg[index[j] * HOURS + i % HOURS]; // 700
|
||||
}
|
||||
}
|
||||
|
||||
for (int ihr = 0; ihr < HOURS; ihr++) {
|
||||
float sumAvg = 0;
|
||||
float sumWk = 0;
|
||||
float wk = 0;
|
||||
|
||||
for (int jk = 0; jk < 5; jk++) {
|
||||
int ind = jk * HOURS + ihr;
|
||||
if (dB[jk] < 1)
|
||||
wk = 1;
|
||||
else
|
||||
wk = 1.0f / (dB[jk] * dB[jk]);
|
||||
|
||||
if (smallHrAvg[ind] != MISSING_VAL) {
|
||||
sumAvg += wk * smallHrAvg[ind];
|
||||
sumWk += wk;
|
||||
}
|
||||
}
|
||||
|
||||
if (sumWk > 0)
|
||||
quietHrAvg[ihr] = sumAvg / sumWk;
|
||||
|
||||
}
|
||||
|
||||
return quietHrAvg;
|
||||
}
|
||||
|
||||
/*
|
||||
* @param -- quietHrAvg, float[24]
|
||||
*
|
||||
* @return -- shifted quietLevelHourAvg, float[24]
|
||||
*/
|
||||
public static float[] getQHA(float[] quietHrAvg) {
|
||||
float[] QHA = new float[24];
|
||||
|
||||
if (quietHrAvg.length != 24)
|
||||
return quietHrAvg;
|
||||
|
||||
for (int ihr = 0; ihr < 24; ihr++) {
|
||||
QHA[ihr] = quietHrAvg[(ihr + 3) % 24];
|
||||
}
|
||||
|
||||
return QHA;
|
||||
}
|
||||
|
||||
/*
|
||||
* @return -- 24 element floating point array. Default fitting lengths.
|
||||
* (one for each hour of the 24 hour interval that ends at EPtime).
|
||||
*/
|
||||
public static float[] getDefLength(String station, int epHour) {
|
||||
float[] defLength = new float[24];
|
||||
float lon = CalcUtil.getLongitude(station);
|
||||
int UTdiff = Math.round(1440.0f * lon / 360.0f);
|
||||
int minute0 = epHour * MINUTES;
|
||||
|
||||
for (int ihr = 0; ihr < HOURS; ihr++) {
|
||||
float sum = 0;
|
||||
|
||||
for (int imin = 0; imin < MINUTES; imin++) {
|
||||
int curMin = (minute0 + ihr * MINUTES + imin) % 1440;
|
||||
int localMin = (curMin + UTdiff) % 1440;
|
||||
|
||||
if (localMin >= 0 && localMin < 180)
|
||||
sum += NIGHT_LENGTH;
|
||||
else if (localMin >= 180 && localMin < 360)
|
||||
sum += DAWN_LENGTH;
|
||||
else if (localMin >= 360 && localMin < 1080)
|
||||
sum += DAY_LENGTH;
|
||||
else if (localMin >= 1080 && localMin < 1260)
|
||||
sum += DUSK_LENGTH;
|
||||
else if (localMin >= 1260 && localMin < 1440)
|
||||
sum += NIGHT_LENGTH;
|
||||
}
|
||||
|
||||
defLength[ihr] = sum / MINUTES;
|
||||
|
||||
}
|
||||
|
||||
return defLength;
|
||||
}
|
||||
|
||||
    /*
     * Wrapper function for a few functions in this class.
     *
     * @param -- hHrAvgs, hourly average for H. float[720]
     *
     * @param -- dHrAvgs, hourly average for D. float[720]
     *
     * @return -- if hHrAvgs is first param, return hQdc; if dHrAvgs is first
     * param, return dQdc. float[1440]
     */
|
||||
public static float[] getHQdcOrDQdc(float[] hHrAvgs, float[] dHrAvgs) {
|
||||
float[] hQdc = null;
|
||||
float[] qhaQdc = null;
|
||||
|
||||
float[] dB = CalcEach3hr.getDisturbanceLevel(hHrAvgs, dHrAvgs);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<Integer, Float> dBsmall = CalcEach3hr.getSmallDisturbanceLevel(dB);
|
||||
|
||||
float[] quietHHrAvg = CalcEach3hr
|
||||
.getQuietLevelHourAvg(dBsmall, hHrAvgs);
|
||||
|
||||
// added from FMIQDCRT11_3hr.pro
|
||||
for (int k = 0; k < quietHHrAvg.length; k++) {
|
||||
if (quietHHrAvg[k] == MISSING_VAL) {
|
||||
quietHHrAvg[k] = CalcUtil.getMedian(quietHHrAvg);
|
||||
}
|
||||
}
|
||||
|
||||
float[] qha = CalcEach3hr.getQHA(quietHHrAvg);
|
||||
|
||||
hQdc = CalcEach1min.getHarmonicFit(qha);// [1440]
|
||||
|
||||
// qhaQdc = CalcEach1min.getQHAQDC(hQdc);// [1440]
|
||||
|
||||
return hQdc;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,382 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.calculation;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.table.KsThree;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/*
|
||||
* The calculation of Kp and related.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ----------- ---------- ---------- --------------------------
|
||||
* 05/14/2013 #989 qzhou Initial Creation
|
||||
* 03/18/2014 #1123 qzhou default k to 99999
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1
|
||||
*/
|
||||
public class CalcKp {
|
||||
private static final float MISSING_VAL = 99999.99f;
|
||||
|
||||
public CalcKp() {
|
||||
|
||||
}
|
||||
|
||||
public static float[] getKest(String station, float[] kIndex, float[] gamma) {
|
||||
float[] kest = new float[8];
|
||||
|
||||
for (int i = 0; i < 8; i++) {
|
||||
int[] gammaLimit = CalcUtil.getKLimit(station); // .getGammaFromK(station,
|
||||
// gamma);
|
||||
if (kIndex[i] < 9) {
|
||||
kest[i] = kIndex[i]
|
||||
+ (gamma[i] - gammaLimit[(int) kIndex[i]])
|
||||
/ (gammaLimit[(int) kIndex[i] + 1] - gammaLimit[(int) kIndex[i]]);
|
||||
} else if (kIndex[i] < 999)
|
||||
kest[i] = 9.0f;
|
||||
else
|
||||
kest[i] = 99999f;
|
||||
|
||||
}
|
||||
|
||||
return kest;
|
||||
}
|
||||
|
||||
public static float getKest(String station, int kIndex, float gamma) {
|
||||
float kest = 99999f;
|
||||
|
||||
int[] gammaLimit = CalcUtil.getKLimit(station);
|
||||
if (kIndex < 9)
|
||||
kest = kIndex + (gamma - gammaLimit[kIndex])
|
||||
/ (gammaLimit[kIndex + 1] - gammaLimit[kIndex]);
|
||||
else if (kIndex < 999)
|
||||
kest = 9.0f;
|
||||
|
||||
return kest;
|
||||
}
|
||||
|
||||
/*
|
||||
* list of the station coefficient values in the order of 00-03, 03-06...
|
||||
*/
|
||||
public static ArrayList<KsThree> getKsThreeList(String station) {
|
||||
|
||||
ArrayList<KsThree> threeKsList = CalcUtil.getStationCoeff()
|
||||
.getStationByCode(station).getKsThree();// size 24
|
||||
|
||||
return threeKsList;
|
||||
}
|
||||
|
||||
public static List<Integer> getKsThree(Date time, String station, int k) {
|
||||
List<Integer> ks = new ArrayList<Integer>();
|
||||
|
||||
// KsThree ksThree = null;
|
||||
ArrayList<KsThree> ksThreeList = getKsThreeList(station);
|
||||
|
||||
if (ksThreeList != null && !ksThreeList.isEmpty()) {
|
||||
|
||||
int hour = CalcUtil.getSPTime(time).getHours();
|
||||
int period = hour / 3;// 24 -> 8
|
||||
|
||||
KsThree ksThree = ksThreeList.get(period);
|
||||
|
||||
if (ksThree != null)
|
||||
ks.add(getKsOfKsThree(k, ksThree));
|
||||
|
||||
ksThree = ksThreeList.get(period + 8);
|
||||
|
||||
if (ksThree != null)
|
||||
ks.add(getKsOfKsThree(k, ksThree));
|
||||
|
||||
ksThree = ksThreeList.get(period + 16);
|
||||
if (ksThree != null)
|
||||
ks.add(getKsOfKsThree(k, ksThree));
|
||||
|
||||
}
|
||||
|
||||
return ks;
|
||||
}
|
||||
|
||||
private static int getKsOfKsThree(int k, KsThree ksThree) {
|
||||
int ks = 99999;
|
||||
|
||||
if (k == 0)
|
||||
ks = ksThree.getK0();
|
||||
else if (k == 1)
|
||||
ks = ksThree.getK1();
|
||||
else if (k == 2)
|
||||
ks = ksThree.getK2();
|
||||
else if (k == 3)
|
||||
ks = ksThree.getK3();
|
||||
else if (k == 4)
|
||||
ks = ksThree.getK4();
|
||||
else if (k == 5)
|
||||
ks = ksThree.getK5();
|
||||
else if (k == 6)
|
||||
ks = ksThree.getK6();
|
||||
else if (k == 7)
|
||||
ks = ksThree.getK7();
|
||||
else if (k == 8)
|
||||
ks = ksThree.getK8();
|
||||
else if (k == 9)
|
||||
ks = ksThree.getK9();
|
||||
|
||||
return ks;
|
||||
}
|
||||
|
||||
public static float getKs(String station, int k, Date time)
|
||||
throws ParseException {
|
||||
float a = 0;
|
||||
float b = 0;
|
||||
float ks = 0;
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||
|
||||
// int year = time.getYear();113
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(time);
|
||||
int year = cal.get(Calendar.YEAR);
|
||||
|
||||
Date date1 = sdf.parse(year + "-01-01");
|
||||
Date date2 = sdf.parse(year + "-02-14");
|
||||
Date date3 = sdf.parse(year + "-02-24");
|
||||
Date date4 = sdf.parse(year + "-03-06");
|
||||
Date date5 = sdf.parse(year + "-03-16");
|
||||
Date date6 = sdf.parse(year + "-04-16");
|
||||
Date date7 = sdf.parse(year + "-04-26");
|
||||
Date date8 = sdf.parse(year + "-05-06");
|
||||
Date date9 = sdf.parse(year + "-05-16");
|
||||
Date date10 = sdf.parse(year + "-08-17");
|
||||
Date date11 = sdf.parse(year + "-08-27");
|
||||
Date date12 = sdf.parse(year + "-09-06");
|
||||
Date date13 = sdf.parse(year + "-09-16");
|
||||
Date date14 = sdf.parse(year + "-10-17");
|
||||
Date date15 = sdf.parse(year + "-10-27");
|
||||
Date date16 = sdf.parse(year + "-11-06");
|
||||
Date date17 = sdf.parse(year + "-11-16");
|
||||
Date date18 = sdf.parse(year + "-12-31");
|
||||
|
||||
        Date date2Leap = sdf.parse(year + "-02-15");
        Date date3Leap = sdf.parse(year + "-02-25");
|
||||
|
||||
List<Integer> ksThree = getKsThree(time, station, k);
|
||||
|
||||
if (time.compareTo(date1) >= 0 && time.compareTo(date2) < 0) {
|
||||
ks = (float) ksThree.get(0) / 3;
|
||||
} else if (time.compareTo(date4) >= 0 && time.compareTo(date5) < 0) {
|
||||
ks = (float) (0.25f * ksThree.get(0) + 0.75f * ksThree.get(1)) / 3;
|
||||
} else if (time.compareTo(date5) >= 0 && time.compareTo(date6) < 0) {
|
||||
ks = (float) ksThree.get(1) / 3;
|
||||
} else if (time.compareTo(date6) >= 0 && time.compareTo(date7) < 0) {
|
||||
ks = (float) (0.75f * ksThree.get(1) + 0.25f * ksThree.get(2)) / 3;
|
||||
} else if (time.compareTo(date7) >= 0 && time.compareTo(date8) < 0) {
|
||||
ks = (float) (0.5f * ksThree.get(1) + 0.5f * ksThree.get(2)) / 3;
|
||||
} else if (time.compareTo(date8) >= 0 && time.compareTo(date9) < 0) {
|
||||
ks = (float) (0.25f * ksThree.get(1) + 0.75f * ksThree.get(2)) / 3;
|
||||
} else if (time.compareTo(date9) >= 0 && time.compareTo(date10) < 0) {
|
||||
ks = (float) ksThree.get(2) / 3;
|
||||
} else if (time.compareTo(date10) >= 0 && time.compareTo(date11) < 0) {
|
||||
ks = (float) (0.75f * ksThree.get(2) + 0.25f * ksThree.get(1)) / 3;
|
||||
} else if (time.compareTo(date11) >= 0 && time.compareTo(date12) < 0) {
|
||||
ks = (float) (0.5f * ksThree.get(2) + 0.5f * ksThree.get(1)) / 3;
|
||||
} else if (time.compareTo(date12) >= 0 && time.compareTo(date13) < 0) {
|
||||
ks = (float) (0.25f * ksThree.get(2) + 0.75f * ksThree.get(1)) / 3;
|
||||
} else if (time.compareTo(date13) >= 0 && time.compareTo(date14) < 0) {
|
||||
ks = (float) ksThree.get(1) / 3;
|
||||
} else if (time.compareTo(date14) >= 0 && time.compareTo(date15) < 0) {
|
||||
ks = (float) (0.75f * ksThree.get(1) + 0.25f * ksThree.get(0)) / 3;
|
||||
} else if (time.compareTo(date15) >= 0 && time.compareTo(date16) < 0) {
|
||||
ks = (float) (0.5f * ksThree.get(1) + 0.5f * ksThree.get(0)) / 3;
|
||||
} else if (time.compareTo(date16) >= 0 && time.compareTo(date17) < 0) {
|
||||
ks = (float) (0.25f * ksThree.get(1) + 0.75f * ksThree.get(0)) / 3;
|
||||
} else if (time.compareTo(date17) >= 0 && time.compareTo(date18) <= 0) {
|
||||
ks = (float) ksThree.get(0) / 3;
|
||||
} else if (CalcUtil.isLeapYear(year)) {
|
||||
            if (time.compareTo(date2Leap) >= 0 && time.compareTo(date3Leap) < 0) {
|
||||
ks = (float) (0.75f * ksThree.get(0) + 0.25f * ksThree.get(1)) / 3;
|
||||
            } else if (time.compareTo(date3Leap) >= 0
|
||||
&& time.compareTo(date4) < 0) {
|
||||
ks = (float) (0.5f * ksThree.get(0) + 0.5f * ksThree.get(1)) / 3;
|
||||
}
|
||||
} else {
|
||||
if (time.compareTo(date2) >= 0 && time.compareTo(date3) < 0) {
|
||||
ks = (float) (0.75f * ksThree.get(0) + 0.25f * ksThree.get(1)) / 3;
|
||||
} else if (time.compareTo(date3) >= 0 && time.compareTo(date4) < 0) {
|
||||
ks = (float) (0.5f * ksThree.get(0) + 0.5f * ksThree.get(1)) / 3;
|
||||
}
|
||||
}
|
||||
|
||||
return ks;
|
||||
}
|
||||
|
||||
// protected float[] getKs(String station, float[] kest) {
|
||||
// float a = 0;
|
||||
// float b = 0;
|
||||
// float[] ks = new float[8];
|
||||
//
|
||||
// Map<Float, Float> abCoeff = CalcUtil.getCoeffAandB(station);
|
||||
// if (abCoeff.size() != 8)
|
||||
// return ks;
|
||||
//
|
||||
// int i = 0;
|
||||
// Iterator<?> iter = abCoeff.entrySet().iterator();
|
||||
// while (iter.hasNext()) {
|
||||
// @SuppressWarnings("unchecked")
|
||||
// Map.Entry<Float, Float> mEntry = (Map.Entry<Float, Float>) iter.next();
|
||||
//
|
||||
// a = mEntry.getKey();
|
||||
// b = mEntry.getValue();
|
||||
// ks[i] = a + b * kest[i];
|
||||
// i++;
|
||||
// }
|
||||
//
|
||||
// return ks;
|
||||
// }
|
||||
|
||||
public static float getKs(String station, float kest, String timePrd) {
|
||||
float a = 0;
|
||||
float b = 0;
|
||||
float ks = 0;
|
||||
|
||||
Map<Float, Float> abCoeff = CalcUtil.getCoeffAandB(station);
|
||||
if (abCoeff.size() != 8)
|
||||
return ks;
|
||||
|
||||
int j = 0;
|
||||
if (timePrd.equalsIgnoreCase("00-03"))
|
||||
j = 0;
|
||||
else if (timePrd.equalsIgnoreCase("03-06"))
|
||||
j = 1;
|
||||
else if (timePrd.equalsIgnoreCase("06-09"))
|
||||
j = 2;
|
||||
else if (timePrd.equalsIgnoreCase("09-12"))
|
||||
j = 3;
|
||||
else if (timePrd.equalsIgnoreCase("12-15"))
|
||||
j = 4;
|
||||
else if (timePrd.equalsIgnoreCase("15-18"))
|
||||
j = 5;
|
||||
else if (timePrd.equalsIgnoreCase("18-21"))
|
||||
j = 6;
|
||||
else if (timePrd.equalsIgnoreCase("21-24"))
|
||||
j = 7;
|
||||
|
||||
int i = 0;
|
||||
Iterator<?> iter = abCoeff.entrySet().iterator();
|
||||
while (iter.hasNext()) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Map.Entry<Float, Float> mEntry = (Map.Entry<Float, Float>) iter
|
||||
.next();
|
||||
|
||||
if (i == j) {
|
||||
a = mEntry.getKey();
|
||||
b = mEntry.getValue();
|
||||
ks = a + b * kest;
|
||||
break;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
return ks;
|
||||
}
|
||||
|
||||
public static int getAest(String station, int kIndex) {
|
||||
return CalcUtil.getK2a(kIndex);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static float[] getKpEst(String[] station, float[] ks) {
|
||||
float kpEst[] = new float[ks.length];
|
||||
float[][] wcoeff = new float[station.length][ks.length];
|
||||
|
||||
if (ks.length != 8)
|
||||
return kpEst;
|
||||
|
||||
for (int i = 0; i < station.length; i++) {
|
||||
Map<String, Float> coeff = CalcUtil.getCoeffW(station[i]);
|
||||
int j = 0;
|
||||
|
||||
Iterator<?> iter = coeff.entrySet().iterator();
|
||||
while (iter.hasNext()) {
|
||||
wcoeff[i][j] = ((Map.Entry<String, Float>) iter.next())
|
||||
.getValue();
|
||||
j++;
|
||||
}
|
||||
}
|
||||
|
||||
float sumW = 0;
|
||||
float sumWK = 0;
|
||||
|
||||
for (int j = 0; j < ks.length; j++) {
|
||||
for (int i = 0; i < station.length; i++) {
|
||||
sumW += wcoeff[i][j];
|
||||
sumWK += wcoeff[i][j] * ks[i];
|
||||
}
|
||||
// kpEst[i] = (float) (Math.round(3 * sumWK / sumW)) / 3;
|
||||
kpEst[j] = sumWK / sumW;
|
||||
kpEst[j] = (int) kpEst[j] + CalcUtil.getThird(kpEst[j]);
|
||||
}
|
||||
|
||||
return kpEst;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static float getKpEst(String[] station, float ks, String fitTime) {
|
||||
float kpEst = 0;
|
||||
float[] wcoeff = new float[8];
|
||||
|
||||
for (int i = 0; i < station.length; i++) {
|
||||
Map<String, Float> coeff = CalcUtil.getCoeffW(station[i]);
|
||||
int j = 0;
|
||||
Iterator<?> iter = coeff.entrySet().iterator();
|
||||
            while (iter.hasNext()) {
                // read each entry exactly once; calling iter.next() a second
                // time here would skip to the following period's coefficient
                Map.Entry<String, Float> entry = (Map.Entry<String, Float>) iter
                        .next();
                if (entry.getKey().equalsIgnoreCase(fitTime)) {
                    wcoeff[i] = entry.getValue();
                    break;
                }
                j++;
            }
|
||||
}
|
||||
|
||||
float sumW = 0;
|
||||
float sumWK = 0;
|
||||
|
||||
for (int i = 0; i < station.length; i++) {
|
||||
sumW += wcoeff[i];
|
||||
sumWK += wcoeff[i] * ks;
|
||||
}
|
||||
|
||||
kpEst = sumWK / sumW;
|
||||
kpEst = (int) kpEst + CalcUtil.getThird(kpEst);
|
||||
|
||||
return kpEst;
|
||||
}
|
||||
|
||||
public static String[] getKp(float kpEst[], String[] kpModifier) {
|
||||
String[] kp = new String[kpEst.length];
|
||||
if (kpEst.length != kpModifier.length)
|
||||
return kp;
|
||||
|
||||
for (int i = 0; i < kpEst.length; i++) {
|
||||
int k = Math.round(kpEst[i]);
|
||||
kp[i] = k + kpModifier[i];
|
||||
}
|
||||
|
||||
return kp;
|
||||
}
|
||||
|
||||
public static String getKp(float kpEst, String kpModifier) {
|
||||
int kp = Math.round(kpEst);
|
||||
|
||||
return kp + kpModifier;
|
||||
}
|
||||
}
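As a rough illustration only (not part of this commit), the network estimate in getKpEst above is a W-coefficient weighted mean of the station Ks values for a single three-hour period, snapped to the nearest third the way CalcUtil.getThird does; the station values and weights below are hypothetical:

public class KpEstSketch {

    // snap a fractional part to 0, 1/3, 2/3 or 1, mirroring CalcUtil.getThird()
    static float nearestThird(float x) {
        float half = 0.333333f / 2;
        if (x <= half)
            return 0f;
        else if (x <= 3 * half)
            return 0.333333f;
        else if (x <= 5 * half)
            return 0.666666f;
        return 1f;
    }

    public static void main(String[] args) {
        float[] ks = { 3.3333f, 2.6667f, 3.0f }; // hypothetical station Ks values
        float[] w = { 1.0f, 0.8f, 0.9f }; // hypothetical W coefficients

        float sumW = 0;
        float sumWK = 0;
        for (int i = 0; i < ks.length; i++) {
            sumW += w[i];
            sumWK += w[i] * ks[i];
        }
        float kpEst = sumWK / sumW; // weighted mean
        float kp = (int) kpEst + nearestThird(kpEst - (int) kpEst);

        System.out.println("weighted mean = " + kpEst + ", Kp estimate = " + kp);
    }
}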
|
|
@ -0,0 +1,552 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.calculation;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.table.KFitTime;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.util.KStationCoefficientLookup;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/*
|
||||
* The k index and decoder calculation utility.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ----------- ---------- ---------- --------------------------
|
||||
* 05/14/2013 #989 qzhou Initial Creation
|
||||
* 06/23/2014 R4152 qzhou Touched up 3 functions
|
||||
 * 07/22/2014   R4152      qzhou       Fixed getMedian. This function is used very rarely, if ever.
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1
|
||||
*/
|
||||
|
||||
public class CalcUtil {
|
||||
private static final float MISSING_VAL = 99999.99f;
|
||||
|
||||
private static final float K_EXPONENT = 3.3f;
|
||||
|
||||
private static KStationCoefficientLookup stationCoeff = KStationCoefficientLookup
|
||||
.getInstance();
|
||||
|
||||
// Gamma limit table
|
||||
private static enum Limit {
|
||||
K0(0), K1(5), K2(10), K3(20), K4(40), K5(70), K6(120), K7(200), K8(330), K9(
|
||||
500);
|
||||
|
||||
private final int kConst;
|
||||
|
||||
private Limit(int kConst) {
|
||||
this.kConst = kConst;
|
||||
}
|
||||
}
|
||||
|
||||
public static int getKConst(int k) {
|
||||
int kConst = 0;
|
||||
if (k == 0)
|
||||
kConst = Limit.K0.kConst;
|
||||
else if (k == 1)
|
||||
kConst = Limit.K1.kConst;
|
||||
else if (k == 2)
|
||||
kConst = Limit.K2.kConst;
|
||||
else if (k == 3)
|
||||
kConst = Limit.K3.kConst;
|
||||
else if (k == 4)
|
||||
kConst = Limit.K4.kConst;
|
||||
else if (k == 5)
|
||||
kConst = Limit.K5.kConst;
|
||||
else if (k == 6)
|
||||
kConst = Limit.K6.kConst;
|
||||
else if (k == 7)
|
||||
kConst = Limit.K7.kConst;
|
||||
else if (k == 8)
|
||||
kConst = Limit.K8.kConst;
|
||||
else if (k == 9)
|
||||
kConst = Limit.K9.kConst;
|
||||
|
||||
return kConst;
|
||||
}
|
||||
|
||||
// A-index table
|
||||
private static enum K2a {
|
||||
a0(0), a1(3), a2(7), a3(15), a4(27), a5(48), a6(80), a7(140), a8(240), a9(
|
||||
400);
|
||||
|
||||
private final int a;
|
||||
|
||||
private K2a(int a) {
|
||||
this.a = a;
|
||||
}
|
||||
}
|
||||
|
||||
public static int getK2a(int k) {
|
||||
int a = 0;
|
||||
if (k == 0)
|
||||
a = K2a.a0.a;
|
||||
else if (k == 1)
|
||||
a = K2a.a1.a;
|
||||
else if (k == 2)
|
||||
a = K2a.a2.a;
|
||||
else if (k == 3)
|
||||
a = K2a.a3.a;
|
||||
else if (k == 4)
|
||||
a = K2a.a4.a;
|
||||
else if (k == 5)
|
||||
a = K2a.a5.a;
|
||||
else if (k == 6)
|
||||
a = K2a.a6.a;
|
||||
else if (k == 7)
|
||||
a = K2a.a7.a;
|
||||
else if (k == 8)
|
||||
a = K2a.a8.a;
|
||||
else if (k == 9)
|
||||
a = K2a.a9.a;
|
||||
|
||||
return a;
|
||||
}
|
||||
|
||||
public static KStationCoefficientLookup getStationCoeff() {
|
||||
return stationCoeff;
|
||||
}
|
||||
|
||||
public static int getK9Limit(String station) throws NumberFormatException {
|
||||
int k9 = 0;
|
||||
|
||||
String k9Limit = getStationCoeff().getStationByCode(station)
|
||||
.getK9Limit();
|
||||
k9 = Integer.parseInt(k9Limit);
|
||||
|
||||
return k9;
|
||||
}
|
||||
|
||||
public static float getLongitude(String station)
|
||||
throws NumberFormatException {
|
||||
float lon = 0;
|
||||
if (station != null && !station.equalsIgnoreCase("")) {
|
||||
String longitude = getStationCoeff().getStationByCode(station)
|
||||
.getLongitude();
|
||||
lon = Float.parseFloat(longitude);
|
||||
}
|
||||
return lon;
|
||||
}
|
||||
|
||||
/*
|
||||
* map of the A and the B values in the order of 00-03, 03-06...
|
||||
*/
|
||||
public static Map<Float, Float> getCoeffAandB(String station) {
|
||||
Map<Float, Float> abCoeff = new HashMap<Float, Float>();
|
||||
|
||||
List<KFitTime> fitTime = getStationCoeff().getStationByCode(station)
|
||||
.getKFitTime();
|
||||
if (fitTime.size() != 8)
|
||||
return abCoeff;
|
||||
|
||||
for (int i = 0; i < 8; i++) {
|
||||
float a = fitTime.get(i).getCoeffA();
|
||||
float b = fitTime.get(i).getCoeffB();
|
||||
abCoeff.put(a, b);
|
||||
}
|
||||
|
||||
return abCoeff;
|
||||
}
|
||||
|
||||
/*
|
||||
* map of the time period and the W values in the order of 00-03, 03-06...
|
||||
*/
|
||||
public static Map<String, Float> getCoeffW(String station) {
|
||||
Map<String, Float> wCoeff = new HashMap<String, Float>();
|
||||
|
||||
List<KFitTime> fitTime = getStationCoeff().getStationByCode(station)
|
||||
.getKFitTime();
|
||||
if (fitTime.size() != 8)
|
||||
return wCoeff;
|
||||
|
||||
for (int i = 0; i < 8; i++) {
|
||||
String a = fitTime.get(i).getKey();
|
||||
float b = fitTime.get(i).getCoeffW();
|
||||
wCoeff.put(a, b);
|
||||
}
|
||||
|
||||
return wCoeff;
|
||||
}
|
||||
|
||||
public static int[] getKLimit(String station) {
|
||||
int[] kLimit = new int[10];
|
||||
int k9Limit = getK9Limit(station);
|
||||
for (int i = 0; i < kLimit.length; i++) {
|
||||
kLimit[i] = Math.round(k9Limit * getKConst(i) / 500.0f);
|
||||
}
|
||||
return kLimit;
|
||||
}
|
||||
|
||||
// public static int[] getAIndex(String station, float[] k-index) {
|
||||
// int[] aIndex = new int[10];
|
||||
// //int k9Limit = getK9Limit(station);
|
||||
// for (int i = 0; i < kLimit.length; i++) {
|
||||
// aIndex[i] = Math.round( getK2a(i));
|
||||
// }
|
||||
// return aIndex;
|
||||
// }
|
||||
|
||||
public static int getKfromTable(int[] kLimit, float gamma) {
|
||||
int kIndex;
|
||||
|
||||
int i = 0;
|
||||
for (i = 0; i < 10; i++) {
|
||||
if (gamma > kLimit[i])
|
||||
continue;
|
||||
else
|
||||
break;
|
||||
}
|
||||
|
||||
// take the lower of i. this step eq. K_limit = K9limit * [5, 10, 20,
|
||||
// 40...
|
||||
if (i > 0)
|
||||
i = i - 1;
|
||||
|
||||
if (i <= 9)
|
||||
kIndex = i;
|
||||
else
|
||||
kIndex = 9;
|
||||
|
||||
return kIndex;
|
||||
}
|
||||
|
||||
// public static int getGammaFromK(String station, int kIndex) {
|
||||
// int gamma = getK9Limit(station) * getKConst(kIndex) / 500;
|
||||
//
|
||||
// return gamma;
|
||||
// }
|
||||
|
||||
// assume db time format yyyy-mm-dd hh:mm:ss
|
||||
public static Date getSPTime(Date currTime) {
|
||||
Date spTime = currTime;
|
||||
|
||||
int hour = currTime.getHours();
|
||||
|
||||
if (hour >= 0 && hour < 3)
|
||||
hour = 0;
|
||||
else if (hour >= 3 && hour < 6)
|
||||
hour = 3;
|
||||
else if (hour >= 6 && hour < 9)
|
||||
hour = 6;
|
||||
else if (hour >= 9 && hour < 12)
|
||||
hour = 9;
|
||||
else if (hour >= 12 && hour < 15)
|
||||
hour = 12;
|
||||
else if (hour >= 15 && hour < 18)
|
||||
hour = 15;
|
||||
else if (hour >= 18 && hour < 21)
|
||||
hour = 18;
|
||||
else if (hour >= 21 && hour < 24)
|
||||
hour = 21;
|
||||
|
||||
spTime.setHours(hour);
|
||||
spTime.setMinutes(0);
|
||||
spTime.setSeconds(0);
|
||||
|
||||
return spTime;
|
||||
}
|
||||
|
||||
public static Date getEPTime(Date currTime) {
|
||||
Date epTime = (Date) currTime.clone();
|
||||
|
||||
int hour = currTime.getHours();
|
||||
if (hour >= 0 && hour < 3)
|
||||
hour = 3;
|
||||
else if (hour >= 3 && hour < 6)
|
||||
hour = 6;
|
||||
else if (hour >= 6 && hour < 9)
|
||||
hour = 9;
|
||||
else if (hour >= 9 && hour < 12)
|
||||
hour = 12;
|
||||
else if (hour >= 12 && hour < 15)
|
||||
hour = 15;
|
||||
else if (hour >= 15 && hour < 18)
|
||||
hour = 18;
|
||||
else if (hour >= 18 && hour < 21)
|
||||
hour = 21;
|
||||
else if (hour >= 21 && hour < 24)
|
||||
hour = 0;
|
||||
|
||||
if (hour != 0)
|
||||
epTime.setHours(hour);
|
||||
else {
|
||||
int day = currTime.getDate() + 1;
|
||||
epTime.setDate(day);
|
||||
epTime.setHours(hour);
|
||||
}
|
||||
|
||||
epTime.setMinutes(0);
|
||||
epTime.setSeconds(0);
|
||||
|
||||
return epTime;
|
||||
}
|
||||
|
||||
    public static boolean isHalfMissing(float[] items) {
        boolean halfMissing = false;

        // count the missing samples; incrementing the loop index on a match
        // would skip the element that follows each missing value
        int missing = 0;
        for (int i = 0; i < items.length; i++) {
            if (items[i] == MISSING_VAL)
                missing++;
        }
        if (missing > items.length / 2)
            halfMissing = true;

        return halfMissing;
    }
|
||||
|
||||
public static float getThird(float kpEst) {
|
||||
float half = 0.333333f / 2;
|
||||
float x = kpEst - (int) kpEst; // get decimal fraction
|
||||
|
||||
if (x >= 0 && x <= half)
|
||||
x = 0;
|
||||
else if (x >= half && x <= 2 * half)
|
||||
x = 0.333333f;
|
||||
else if (x >= 2 * half && x <= 3 * half)
|
||||
x = 0.333333f;
|
||||
else if (x >= 3 * half && x <= 4 * half)
|
||||
x = 0.666666f;
|
||||
else if (x >= 4 * half && x <= 5 * half)
|
||||
x = 0.666666f;
|
||||
else if (x >= 5 * half && x <= 6 * half)
|
||||
x = 1;
|
||||
|
||||
return x;
|
||||
}
|
||||
|
||||
public static float maxValue(float[] dev) {
|
||||
float max = -99999;
|
||||
for (int i = 0; i < dev.length; i++) {
|
||||
if (dev[i] > max && dev[i] < MISSING_VAL) {
|
||||
max = dev[i];
|
||||
}
|
||||
}
|
||||
return max;
|
||||
}
|
||||
|
||||
public static float minValue(float[] dev) {
|
||||
float min = 99999;
|
||||
for (int i = 0; i < dev.length; i++) {
|
||||
if (dev[i] < min && dev[i] > -MISSING_VAL) {
|
||||
min = dev[i];
|
||||
}
|
||||
}
|
||||
return min;
|
||||
}
|
||||
|
||||
/*
|
||||
* 10 element floating point array
|
||||
*/
|
||||
public static float[] geKLength() {
|
||||
float[] kLength = new float[10];
|
||||
|
||||
kLength[0] = 0;
|
||||
for (int i = 1; i < 10; i++) {
|
||||
kLength[i] = (float) Math.exp(K_EXPONENT * Math.log(i));
|
||||
if (kLength[i] > 1080)
|
||||
kLength[i] = 1080;
|
||||
}
|
||||
|
||||
return kLength;
|
||||
}
|
||||
|
||||
// uri: /geomag/2013-05-20_00:00:00.0/HAD/101/GEOMAG
|
||||
public static String getSourceFromUri(String uri) {
|
||||
if (uri != null && uri.length() >= 37)
|
||||
return uri.substring(34, 37);
|
||||
else
|
||||
return "";
|
||||
}
|
||||
|
||||
public static String getStationFromUri(String uri) {
|
||||
if (uri != null && uri.length() >= 37)
|
||||
return uri.substring(30, 33);
|
||||
else
|
||||
return "";
|
||||
}
|
||||
|
||||
public static Date getTimeFromUri(String uri) throws ParseException {
|
||||
String format = "yyyy-MM-dd'_'HH:mm:ss.s";
|
||||
SimpleDateFormat sdf = new SimpleDateFormat(format);
|
||||
|
||||
if (uri != null && uri.length() >= 37) {
|
||||
String time = uri.substring(8, 29);
|
||||
Date date = sdf.parse(time);
|
||||
return date;
|
||||
} else
|
||||
return new Date();
|
||||
}
|
||||
|
||||
// get the front part before the source in the uri
|
||||
public static String separateSourceFrontUri(String uri) {
|
||||
if (uri != null && uri.length() >= 37)
|
||||
return uri.substring(0, 34);
|
||||
else
|
||||
return "";
|
||||
}
|
||||
|
||||
public static float[] toFloatArray(List<Float> list) {
|
||||
float[] ret = new float[list.size()];
|
||||
int i = 0;
|
||||
for (Float e : list)
|
||||
ret[i++] = e.floatValue();
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static int[] toIntArray(List<Integer> list) {
|
||||
int[] ret = new int[list.size()];
|
||||
int i = 0;
|
||||
for (Integer e : list)
|
||||
ret[i++] = e.intValue();
|
||||
return ret;
|
||||
}
|
||||
|
||||
public static boolean isLeapYear(int year) {
|
||||
boolean isLeap;
|
||||
|
||||
if (year % 400 == 0)
|
||||
isLeap = true;
|
||||
else if (year % 100 == 0)
|
||||
isLeap = false;
|
||||
else if (year % 4 == 0)
|
||||
isLeap = true;
|
||||
else
|
||||
isLeap = false;
|
||||
|
||||
return isLeap;
|
||||
}
|
||||
|
||||
// public static String getMonthDayFromNumber(int year, int number) {
|
||||
// //CL22013041.min
|
||||
// String temp = "";
|
||||
// String month = "";
|
||||
// String day = "";
|
||||
// String monthDay = "";
|
||||
// Boolean isLeapYear = isLeapYear( year);
|
||||
// int[] days = {31,28,31,30,31,30,31,31,30,31,30,31};
|
||||
// int[] leapDays = {31,29,31,30,31,30,31,31,30,31,30,31};
|
||||
// Calendar cal = Calendar.getInstance();
|
||||
// cal.get(Calendar.DAY_OF_MONTH);
|
||||
// cal.get(Calendar.MONTH);
|
||||
// cal.get(Calendar.DAY_OF_YEAR);
|
||||
// cal.set(Calendar.DAY_OF_YEAR, number);
|
||||
// int[] num =
|
||||
// if (isLeapYear) {
|
||||
//
|
||||
// }
|
||||
// else {
|
||||
//
|
||||
// }
|
||||
// if (number<=31){ //JEJ, m130212.txt
|
||||
// month = "01";
|
||||
// day = String.valueOf(number);
|
||||
// }
|
||||
// else if (number > 31 && number <= 59){
|
||||
// month = "02";
|
||||
// day = String.valueOf(number-31);
|
||||
// }
|
||||
// else if (number > 31 && number <= 59){
|
||||
// month = "03";
|
||||
// day = String.valueOf(number-31);
|
||||
// }
|
||||
// else if (fileName.startsWith("ha")){ CNB,NGK, WNG
|
||||
// temp = fileName.substring(3, 10);
|
||||
// year = temp.substring(0, 4);
|
||||
// }
|
||||
//
|
||||
// return monthDay;
|
||||
// }
|
||||
|
||||
public static String getTimeFromFileName(String fileName) { // CL22013041.min
|
||||
String time = "";
|
||||
String temp = "";
|
||||
String year = "";
|
||||
String month = "";
|
||||
String day = "";
|
||||
String num = "";
|
||||
|
||||
Calendar cal = Calendar.getInstance();
|
||||
|
||||
if (fileName.startsWith("m")) { // JEJ, m130212.txt
|
||||
temp = fileName.substring(1, 7);
|
||||
year = "20" + temp.substring(4, 6);
|
||||
month = temp.substring(2, 4);
|
||||
day = temp.substring(0, 2);
|
||||
} else if (fileName.startsWith("ha")) {
|
||||
temp = fileName.substring(2, 9);
|
||||
year = temp.substring(3, 7);
|
||||
num = temp.substring(0, 3);
|
||||
try {
|
||||
cal.set(Calendar.DAY_OF_YEAR, Integer.parseInt(num));
|
||||
} catch (NumberFormatException e) {
|
||||
|
||||
}
|
||||
month = String.valueOf(cal.get(Calendar.MONTH));
|
||||
day = String.valueOf(cal.get(Calendar.DAY_OF_MONTH));
|
||||
} else if (fileName.startsWith("BOU") || fileName.startsWith("CL2")
|
||||
|| fileName.startsWith("CMO") || fileName.startsWith("OTT")
|
||||
|| fileName.startsWith("MEA")) {
|
||||
temp = fileName.substring(3, 10);
|
||||
year = temp.substring(0, 4);
|
||||
num = temp.substring(4, 7);
|
||||
|
||||
try {
|
||||
cal.set(Calendar.DAY_OF_YEAR, Integer.parseInt(num));
|
||||
} catch (NumberFormatException e) {
|
||||
|
||||
}
|
||||
month = String.valueOf(cal.get(Calendar.MONTH) + 1);
|
||||
day = String.valueOf(cal.get(Calendar.DAY_OF_MONTH));
|
||||
} else if (fileName.startsWith("ha") || fileName.startsWith("CNB")
|
||||
|| fileName.startsWith("OTT") || fileName.startsWith("WNG")) {
|
||||
temp = fileName.substring(3, 10);
|
||||
year = temp.substring(0, 4);
|
||||
month = temp.substring(4, 6);
|
||||
day = temp.substring(6, 8);
|
||||
}
|
||||
|
||||
if (month.length() == 1)
|
||||
month = "0" + month;
|
||||
if (day.length() == 1)
|
||||
day = "0" + day;
|
||||
time = year + "-" + month + "-" + day;
|
||||
return time;
|
||||
}
|
||||
|
||||
public static float getMedian(float[] array) {
|
||||
float median = 0;
|
||||
if (array.length <= 1)
|
||||
return array[0];
|
||||
|
||||
float[] arraySort = array.clone();
|
||||
Arrays.sort(arraySort);
|
||||
|
||||
// remove missing data
|
||||
List<Float> newArray = new ArrayList<Float>();
|
||||
for (int k = 0; k < arraySort.length; k++) {
|
||||
if (arraySort[k] != MISSING_VAL)
|
||||
newArray.add(arraySort[k]);
|
||||
}
|
||||
|
||||
int size = newArray.size();
|
||||
if (size % 2 == 0)
|
||||
median = (newArray.get(size / 2) + newArray.get(size / 2 - 1)) / 2;
|
||||
else
|
||||
median = newArray.get((size - 1) / 2);
|
||||
|
||||
return median;
|
||||
}
|
||||
|
||||
}
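For illustration only (not part of this commit), the K-limit scaling in getKLimit and the threshold search in getKfromTable above amount to the following; the K9 limit and gamma value are made-up example numbers:

public class KLimitSketch {

    // the same K0..K9 gamma constants as CalcUtil's Limit enum
    private static final int[] K_CONST = { 0, 5, 10, 20, 40, 70, 120, 200, 330,
            500 };

    public static void main(String[] args) {
        int k9Limit = 500; // hypothetical station K9 limit, in nT

        // scale the reference thresholds by the station's K9 limit
        int[] kLimit = new int[10];
        for (int i = 0; i < kLimit.length; i++) {
            kLimit[i] = Math.round(k9Limit * K_CONST[i] / 500.0f);
        }

        // take the largest threshold that the observed range still exceeds
        float gamma = 85.0f; // hypothetical 3-hour range, in nT
        int k = 0;
        while (k < 9 && gamma > kLimit[k + 1]) {
            k++;
        }
        System.out.println("gamma " + gamma + " -> K = " + k); // K = 5 here
    }
}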
|
|
@ -1,5 +1,7 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.dao;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagAvg;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
|
@ -11,19 +13,20 @@ import org.springframework.orm.hibernate3.HibernateTemplate;
|
|||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagAvg;
|
||||
|
||||
import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||
import com.raytheon.uf.edex.database.dao.CoreDao;
|
||||
import com.raytheon.uf.edex.database.dao.DaoConfig;
|
||||
import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
||||
|
||||
/**
|
||||
* Record implementation for geomag avgDao.
|
||||
* Record implementation for geomag avgDao.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ---------------- --------------------------
|
||||
* 08/14/2013 T989 qzhou Initial creation.
|
||||
* 03/13/2014 sgurung Added method purgeDataByRefTime()
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
|
@ -31,16 +34,16 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
|
|||
*/
|
||||
|
||||
public class GeoMagAvgDao extends CoreDao {

    /**
     * Creates a new GeoMagAvgDao
     */
    public GeoMagAvgDao() {
        super(DaoConfig.forClass(GeoMagAvg.class));
    }
|
||||
|
||||
|
||||
/**
|
||||
* Retrieves a GeoMagAvgId based on the given id
|
||||
*
|
||||
*
|
||||
* @param id
|
||||
* The given ID number
|
||||
* @return The GeoMagAvgId
|
||||
|
@ -49,13 +52,14 @@ public class GeoMagAvgDao extends CoreDao {
|
|||
return (GeoMagAvg) super.queryById(id);
|
||||
}
|
||||
|
||||
public int getAreaId (int id){
|
||||
return queryById(id).getId();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieves data from postGres
|
||||
*
|
||||
* @return Criteria list
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<GeoMagAvg> getAvgForStation(final String stationCode, final Date start, final Date end) {
|
||||
public List<GeoMagAvg> getAvgForStation(final String stationCode,
|
||||
final Date start, final Date end) {
|
||||
return (List<GeoMagAvg>) txTemplate.execute(new TransactionCallback() {
|
||||
@Override
|
||||
public Object doInTransaction(TransactionStatus status) {
|
||||
|
@ -64,19 +68,17 @@ public class GeoMagAvgDao extends CoreDao {
|
|||
Criteria crit = sess.createCriteria(GeoMagAvg.class);
|
||||
Criterion where1 = Restrictions.eq("stationCode", stationCode);
|
||||
crit.add(where1);
|
||||
// Criterion where2 = Restrictions.gt("avgTime", start);
|
||||
// crit.add(where2);
|
||||
// Criterion where3 = Restrictions.le("avgTime", end);
|
||||
// crit.add(where3);
|
||||
Criterion where2 = Restrictions.between("avgTime", start, end);//include bounds, but don't need bounds
|
||||
crit.add(where2);
|
||||
|
||||
Criterion where2 = Restrictions.between("avgTime", start, end);
|
||||
crit.add(where2);
|
||||
return crit.list();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<GeoMagAvg> getSingleAvg(final String stationCode, final Date date) {
|
||||
public List<GeoMagAvg> getSingleAvg(final String stationCode,
|
||||
final Date date) {
|
||||
return (List<GeoMagAvg>) txTemplate.execute(new TransactionCallback() {
|
||||
@Override
|
||||
public Object doInTransaction(TransactionStatus status) {
|
||||
|
@ -86,9 +88,17 @@ public class GeoMagAvgDao extends CoreDao {
|
|||
Criterion where1 = Restrictions.eq("stationCode", stationCode);
|
||||
crit.add(where1);
|
||||
Criterion where2 = Restrictions.eq("avgTime", date);
|
||||
crit.add(where2);
|
||||
crit.add(where2);
|
||||
return crit.list();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public int purgeDataByRefTime(Date refTime) throws DataAccessLayerException {
|
||||
DatabaseQuery deleteStmt = new DatabaseQuery(this.daoClass);
|
||||
// add 30 minutes to get hourly average reference time
|
||||
Date avgTime = new Date(refTime.getTime() + (30 * 60000));
|
||||
deleteStmt.addQueryParam("avgTime", avgTime);
|
||||
return this.deleteByCriteria(deleteStmt);
|
||||
}
|
||||
}
|
||||
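An illustrative sketch (not part of this changeset): the new purgeDataByRefTime() above keys its delete on refTime plus 30 minutes, since hourly averages are stored at the half hour. A tiny standalone check of that offset arithmetic, using a hypothetical epoch value.

import java.util.Date;

public class AvgPurgeTimeSketch {
    public static void main(String[] args) {
        // A top-of-the-hour reference time (example epoch millis).
        Date refTime = new Date(1394686800000L);
        // Same arithmetic as GeoMagAvgDao.purgeDataByRefTime(): target the
        // hourly-average row sitting 30 minutes after the reference time.
        Date avgTime = new Date(refTime.getTime() + (30 * 60000));
        // Prints the same instant shifted forward by 30 minutes.
        System.out.println(refTime + " -> " + avgTime);
    }
}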
@ -1,8 +1,13 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.dao;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagAvg;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK1min;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK3hr;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagRecord;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.PluginException;
|
||||
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
|
||||
|
@ -12,21 +17,18 @@ import com.raytheon.uf.common.datastorage.records.AbstractStorageRecord;
|
|||
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
|
||||
import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||
import com.raytheon.uf.edex.database.plugin.PluginDao;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagAvg;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK1min;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK3hr;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagRecord;
|
||||
import com.raytheon.uf.edex.database.purge.PurgeLogger;
|
||||
|
||||
/**
|
||||
* This is a Data Access Object (DAO) driver to interact with geomag database table and HDF5 data store.
|
||||
* This is a Data Access Object (DAO) driver to interact with geomag database
|
||||
* table and HDF5 data store.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 04/2013 975 S. Gurung Initial Creation
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 04/2013 975 S. Gurung Initial Creation
|
||||
* 07/16/2013 975 Q. Zhou Added fields.
|
||||
* </pre>
|
||||
*
|
||||
|
@ -34,116 +36,150 @@ import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagRecord;
|
|||
* @version 1.0
|
||||
*/
|
||||
public class GeoMagDao extends PluginDao {
|
||||
|
||||
/** The source data access object */
|
||||
private GeoMagAvgDao avgDao = new GeoMagAvgDao();
|
||||
private GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao();
|
||||
private GeoMagK1minDao k1minDao = new GeoMagK1minDao();
|
||||
|
||||
/** The source data access object */
|
||||
private GeoMagAvgDao avgDao = new GeoMagAvgDao();
|
||||
|
||||
private GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao();
|
||||
|
||||
private GeoMagK1minDao k1minDao = new GeoMagK1minDao();
|
||||
|
||||
public GeoMagDao(String pluginName) throws PluginException {
|
||||
super(pluginName);
|
||||
}
|
||||
|
||||
// @Override
|
||||
// public StorageStatus persistToHDF5(PluginDataObject... records)
|
||||
// throws PluginException {
|
||||
// long t0 = System.currentTimeMillis();
|
||||
//
|
||||
// // NOTE: currently making the assumption that models aren't
|
||||
// // mixed in the records aggregate. If this isn't true,
|
||||
// // some pre-processing will be needed.
|
||||
// Map<PointDataContainer, List<PointDataView>> containerMap = new HashMap<PointDataContainer, List<PointDataView>>(
|
||||
// records.length);
|
||||
// Map<PointDataContainer, File> fileMap = new HashMap<PointDataContainer, File>();
|
||||
//
|
||||
// for (PluginDataObject p : records) {
|
||||
// if (p instanceof IPointData) {
|
||||
// PointDataView pdv = ((IPointData) p).getPointDataView();
|
||||
// List<PointDataView> views = containerMap
|
||||
// .get(pdv.getContainer());
|
||||
// if (views == null) {
|
||||
// views = new ArrayList<PointDataView>();
|
||||
// containerMap.put(pdv.getContainer(), views);
|
||||
// }
|
||||
// views.add(pdv);
|
||||
// File file = fileMap.get(pdv.getContainer());
|
||||
// if (file == null) {
|
||||
// file = getFullFilePath(p);
|
||||
// System.out.println("***file " +file.toString());
|
||||
// fileMap.put(pdv.getContainer(), file);
|
||||
// }
|
||||
//
|
||||
// }DataTime
|
||||
// }
|
||||
//
|
||||
// List<StorageStatus> ssList = new ArrayList<StorageStatus>();
|
||||
// try {
|
||||
// for (PointDataContainer container : containerMap.keySet()) {
|
||||
// IDataStore ds = DataStoreFactory.getDataStore(fileMap
|
||||
// .get(container));
|
||||
// StorageProperties sp = new StorageProperties();
|
||||
// String compression = PluginRegistry.getInstance()
|
||||
// .getRegisteredObject(pluginName).getCompression();
|
||||
// if (compression != null) {
|
||||
// sp.setCompression(StorageProperties.Compression
|
||||
// .valueOf(compression));
|
||||
// }
|
||||
//
|
||||
// Set<String> params = container.getParameters();
|
||||
// for (String param : params) {
|
||||
// try {
|
||||
// IDataRecord idr = container.getParameterRecord(param);
|
||||
// ds.addDataRecord(idr, sp);
|
||||
// } catch (StorageException e) {
|
||||
// throw new PluginException("Error adding record", e);
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// try {
|
||||
// StorageStatus ss = ds.store(StoreOp.APPEND);
|
||||
// if (ss.getOperationPerformed() == StoreOp.APPEND) {
|
||||
// // increment the indices
|
||||
// List<PointDataView> views = containerMap.get(container);
|
||||
// int idx = (int) ss.getIndexOfAppend()[0];
|
||||
// container.incrementIds(idx, views);
|
||||
// }
|
||||
// ssList.add(ss);
|
||||
// } catch (StorageException e) {
|
||||
// throw new PluginException("Error updating point file", e);
|
||||
// }
|
||||
// }
|
||||
// // Aggregate the storage status errors
|
||||
// StorageStatus aggregatedStatus = new StorageStatus();
|
||||
// List<StorageException> se = new ArrayList<StorageException>();
|
||||
// for (StorageStatus ss : ssList) {
|
||||
// StorageException[] seArr = ss.getExceptions();
|
||||
// if (seArr != null) {
|
||||
// se.addAll(Arrays.asList(seArr));
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// aggregatedStatus.setExceptions(se.toArray(new StorageException[se
|
||||
// .size()]));
|
||||
// return aggregatedStatus;
|
||||
// }
|
||||
//
|
||||
// finally {
|
||||
// System.out.println("Time spent in persist: "
|
||||
// + (System.currentTimeMillis() - t0));
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// public File getFullFilePath(PluginDataObject p) {
|
||||
// File file;
|
||||
// String directory = p.getPluginName() + File.separator
|
||||
// + pathProvider.getHDFPath(p.getPluginName(), (IPersistable) p);
|
||||
// file = new File(directory
|
||||
// + File.separator
|
||||
// + pathProvider.getHDFFileName(p.getPluginName(),
|
||||
// (IPersistable) p));
|
||||
// return file;
|
||||
// }
|
||||
|
||||
/**
|
||||
* Overridden to purge expired data from geomag_k1min, geomag_houravg and
|
||||
* geomag_k3hr tables as well
|
||||
*/
|
||||
@Override
|
||||
public int purgeDataByRefTime(Date refTime,
|
||||
Map<String, String> productKeys, boolean trackHdf5,
|
||||
boolean trackToUri, Map<String, List<String>> hdf5FileToUriPurged)
|
||||
throws DataAccessLayerException {
|
||||
|
||||
int results = super.purgeDataByRefTime(refTime, productKeys, trackHdf5,
|
||||
trackToUri, hdf5FileToUriPurged);
|
||||
|
||||
// delete expired data from geomag_k1min, geomag_houravg and geomag_k3hr
|
||||
// tables
|
||||
try {
|
||||
avgDao.purgeDataByRefTime(refTime);
|
||||
k1minDao.purgeDataByRefTime(refTime);
|
||||
k3hrDao.purgeDataByRefTime(refTime);
|
||||
} catch (Exception e) {
|
||||
PurgeLogger
|
||||
.logError(
|
||||
"Purging expired data from the secondary tables for this plugin failed.",
|
||||
this.pluginName);
|
||||
}
|
||||
|
||||
return results;
|
||||
|
||||
}
|
||||
|
||||
// @Override
|
||||
// public StorageStatus persistToHDF5(PluginDataObject... records)
|
||||
// throws PluginException {
|
||||
// long t0 = System.currentTimeMillis();
|
||||
//
|
||||
// // NOTE: currently making the assumption that models aren't
|
||||
// // mixed in the records aggregate. If this isn't true,
|
||||
// // some pre-processing will be needed.
|
||||
// Map<PointDataContainer, List<PointDataView>> containerMap = new
|
||||
// HashMap<PointDataContainer, List<PointDataView>>(
|
||||
// records.length);
|
||||
// Map<PointDataContainer, File> fileMap = new HashMap<PointDataContainer,
|
||||
// File>();
|
||||
//
|
||||
// for (PluginDataObject p : records) {
|
||||
// if (p instanceof IPointData) {
|
||||
// PointDataView pdv = ((IPointData) p).getPointDataView();
|
||||
// List<PointDataView> views = containerMap
|
||||
// .get(pdv.getContainer());
|
||||
// if (views == null) {
|
||||
// views = new ArrayList<PointDataView>();
|
||||
// containerMap.put(pdv.getContainer(), views);
|
||||
// }
|
||||
// views.add(pdv);
|
||||
// File file = fileMap.get(pdv.getContainer());
|
||||
// if (file == null) {
|
||||
// file = getFullFilePath(p);
|
||||
// System.out.println("***file " +file.toString());
|
||||
// fileMap.put(pdv.getContainer(), file);
|
||||
// }
|
||||
//
|
||||
// }DataTime
|
||||
// }
|
||||
//
|
||||
// List<StorageStatus> ssList = new ArrayList<StorageStatus>();
|
||||
// try {
|
||||
// for (PointDataContainer container : containerMap.keySet()) {
|
||||
// IDataStore ds = DataStoreFactory.getDataStore(fileMap
|
||||
// .get(container));
|
||||
// StorageProperties sp = new StorageProperties();
|
||||
// String compression = PluginRegistry.getInstance()
|
||||
// .getRegisteredObject(pluginName).getCompression();
|
||||
// if (compression != null) {
|
||||
// sp.setCompression(StorageProperties.Compression
|
||||
// .valueOf(compression));
|
||||
// }
|
||||
//
|
||||
// Set<String> params = container.getParameters();
|
||||
// for (String param : params) {
|
||||
// try {
|
||||
// IDataRecord idr = container.getParameterRecord(param);
|
||||
// ds.addDataRecord(idr, sp);
|
||||
// } catch (StorageException e) {
|
||||
// throw new PluginException("Error adding record", e);
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// try {
|
||||
// StorageStatus ss = ds.store(StoreOp.APPEND);
|
||||
// if (ss.getOperationPerformed() == StoreOp.APPEND) {
|
||||
// // increment the indices
|
||||
// List<PointDataView> views = containerMap.get(container);
|
||||
// int idx = (int) ss.getIndexOfAppend()[0];
|
||||
// container.incrementIds(idx, views);
|
||||
// }
|
||||
// ssList.add(ss);
|
||||
// } catch (StorageException e) {
|
||||
// throw new PluginException("Error updating point file", e);
|
||||
// }
|
||||
// }
|
||||
// // Aggregate the storage status errors
|
||||
// StorageStatus aggregatedStatus = new StorageStatus();
|
||||
// List<StorageException> se = new ArrayList<StorageException>();
|
||||
// for (StorageStatus ss : ssList) {
|
||||
// StorageException[] seArr = ss.getExceptions();
|
||||
// if (seArr != null) {
|
||||
// se.addAll(Arrays.asList(seArr));
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// aggregatedStatus.setExceptions(se.toArray(new StorageException[se
|
||||
// .size()]));
|
||||
// return aggregatedStatus;
|
||||
// }
|
||||
//
|
||||
// finally {
|
||||
// System.out.println("Time spent in persist: "
|
||||
// + (System.currentTimeMillis() - t0));
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// public File getFullFilePath(PluginDataObject p) {
|
||||
// File file;
|
||||
// String directory = p.getPluginName() + File.separator
|
||||
// + pathProvider.getHDFPath(p.getPluginName(), (IPersistable) p);
|
||||
// file = new File(directory
|
||||
// + File.separator
|
||||
// + pathProvider.getHDFFileName(p.getPluginName(),
|
||||
// (IPersistable) p));
|
||||
// return file;
|
||||
// }
|
||||
|
||||
/**
|
||||
     * Retrieves a geomag report using the dataURI.
|
||||
*
|
||||
|
@ -152,7 +188,7 @@ public class GeoMagDao extends PluginDao {
|
|||
* @return The report record if it exists.
|
||||
*/
|
||||
public GeoMagRecord queryByDataURI(String dataURI) {
|
||||
GeoMagRecord report = null;
|
||||
GeoMagRecord report = null;
|
||||
List<?> obs = null;
|
||||
try {
|
||||
obs = queryBySingleCriteria("dataURI", dataURI);
|
||||
|
@ -184,406 +220,82 @@ public class GeoMagDao extends PluginDao {
|
|||
}
|
||||
|
||||
@Override
|
||||
public IDataStore populateDataStore(IDataStore dataStore, IPersistable record)
|
||||
throws StorageException {
|
||||
//return null;
|
||||
public IDataStore populateDataStore(IDataStore dataStore,
|
||||
IPersistable record) throws StorageException {
|
||||
|
||||
GeoMagRecord magRecord = (GeoMagRecord) record;
|
||||
|
||||
// change to 00:00:00.0. "/geomag/2013-04-01_00:00:00.0/BOU/102/GEOMAG";
|
||||
// String headUri = magRecord.getDataURI();
|
||||
// headUri = headUri.substring(0, 18) +"_00:00:00.0/" + headUri.substring(30);
|
||||
|
||||
/*
|
||||
* Write observation times to HDF5.
|
||||
*/
|
||||
// if (magRecord.getObsTimes() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new LongDataRecord(GeoMagRecord.OBS_TIME,
|
||||
// headUri, (long[]) magRecord.getObsTimes(), 1,
|
||||
// new long[] {magRecord.getObsTimes().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
|
||||
// if (magRecord.getCompIdx() != null) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.CompIdx,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getCompIdx(), 1,
|
||||
// new long[] {magRecord.getCompIdx().length});
|
||||
//
|
||||
// storageRecord.setCorrelationObject(record);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
// }
|
||||
|
||||
/*
|
||||
* Write component1 data to HDF5.
|
||||
*/
|
||||
if ( magRecord.getComp1Data() != null ) {
|
||||
AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.component1,
|
||||
magRecord.getDataURI(), (float[]) magRecord.getComp1Data(), 1,
|
||||
new long[] {magRecord.getComp1Data().length});
|
||||
storageRecord.setCorrelationObject(magRecord);
|
||||
dataStore.addDataRecord(storageRecord);
|
||||
//StorageStatus ss = dataStore.store(StoreOp.APPEND);
|
||||
if (magRecord.getComp1Data() != null) {
|
||||
AbstractStorageRecord storageRecord = new FloatDataRecord(
|
||||
GeoMagRecord.component1, magRecord.getDataURI(),
|
||||
(float[]) magRecord.getComp1Data(), 1,
|
||||
new long[] { magRecord.getComp1Data().length });
|
||||
storageRecord.setCorrelationObject(magRecord);
|
||||
dataStore.addDataRecord(storageRecord);
|
||||
// StorageStatus ss = dataStore.store(StoreOp.APPEND);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Write component2 data to HDF5.
|
||||
*/
|
||||
if ( magRecord.getComp2Data() != null ) {
|
||||
AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.component2,
|
||||
magRecord.getDataURI(), (float[]) magRecord.getComp2Data(), 1,
|
||||
new long[] {magRecord.getComp2Data().length});
|
||||
storageRecord.setCorrelationObject(magRecord);
|
||||
dataStore.addDataRecord(storageRecord);
|
||||
|
||||
if (magRecord.getComp2Data() != null) {
|
||||
AbstractStorageRecord storageRecord = new FloatDataRecord(
|
||||
GeoMagRecord.component2, magRecord.getDataURI(),
|
||||
(float[]) magRecord.getComp2Data(), 1,
|
||||
new long[] { magRecord.getComp2Data().length });
|
||||
storageRecord.setCorrelationObject(magRecord);
|
||||
dataStore.addDataRecord(storageRecord);
|
||||
|
||||
}
|
||||
|
||||
// /*
|
||||
// * Write component3 data to HDF5.
|
||||
// */
|
||||
// if ( magRecord.getComp3Data() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.component3,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getComp3Data(), 1,
|
||||
// new long[] {magRecord.getComp3Data().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// /*
|
||||
// * Write component4 data to HDF5.
|
||||
// */
|
||||
// if ( magRecord.getComp4Data() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.component4,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getComp4Data(), 1,
|
||||
// new long[] {magRecord.getComp4Data().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
// if (magRecord.getHrAvgIdx() != null) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.HrAvgIdx,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getHrAvgIdx(), 1,
|
||||
// new long[] {magRecord.getHrAvgIdx().length});
|
||||
//
|
||||
// storageRecord.setCorrelationObject(record);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
// }
|
||||
//
|
||||
// /*
|
||||
// * Write H_HR_AVG data to HDF5.
|
||||
// */
|
||||
// if ( magRecord.getHrAvgH() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.H_HR_AVG,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHrAvgH(), 1,
|
||||
// new long[] {magRecord.getHrAvgH().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// /*
|
||||
// * Write D_HR_AVG data to HDF5.
|
||||
// */
|
||||
// if ( magRecord.getHrAvgD() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.D_HR_AVG,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHrAvgD(), 1,
|
||||
// new long[] {magRecord.getHrAvgD().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
// /*
|
||||
// * Write K_Index data to HDF5.
|
||||
// */
|
||||
// // 3hr
|
||||
// if ( magRecord.getKKIndex() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.K_Index,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getKKIndex(), 1,
|
||||
// new long[] {magRecord.getKKIndex().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKKGamma() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_Gamma,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKKGamma(), 1,
|
||||
// new long[] {magRecord.getKKGamma().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKKReal() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_Real,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKKReal(), 1,
|
||||
// new long[] {magRecord.getKKReal().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKestIndex() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.Kest_Index,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getKestIndex(), 1,
|
||||
// new long[] {magRecord.getKestIndex().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKestGamma() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Gamma,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKestGamma(), 1,
|
||||
// new long[] {magRecord.getKestGamma().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKestReal() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Real,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKestReal(), 1,
|
||||
// new long[] {magRecord.getKestReal().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getHKGamma() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Gamma,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHKGamma(), 1,
|
||||
// new long[] {magRecord.getHKGamma().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getHKReal() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Real,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHKReal(), 1,
|
||||
// new long[] {magRecord.getHKReal().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDKGamma() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Gamma,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getDKGamma(), 1,
|
||||
// new long[] {magRecord.getDKGamma().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDKReal() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Real,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getDKReal(), 1,
|
||||
// new long[] {magRecord.getDKReal().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
//
|
||||
//
|
||||
// // 1 min
|
||||
// if ( magRecord.getKestIndex1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.Kest_Index_1m,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getKestIndex1m(), 1,
|
||||
// new long[] {magRecord.getKestIndex1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKestGamma1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Gamma_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKestGamma1m(), 1,
|
||||
// new long[] {magRecord.getKestGamma1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getKestReal1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Kest_Real_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKestReal1m(), 1,
|
||||
// new long[] {magRecord.getKestReal1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getHKGamma1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Gamma_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHKGamma1m(), 1,
|
||||
// new long[] {magRecord.getHKGamma1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getHKReal1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Real_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHKReal1m(), 1,
|
||||
// new long[] {magRecord.getHKReal1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDKGamma1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Gamma_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getDKGamma1m(), 1,
|
||||
// new long[] {magRecord.getDKGamma1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDKReal1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Real_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getDKReal1m(), 1,
|
||||
// new long[] {magRecord.getDKReal1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
// if ( magRecord.getHKIndex1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KH_Index_1m,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getHKIndex1m(), 1,
|
||||
// new long[] {magRecord.getHKIndex1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDKIndex1m() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KD_Index_1m,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getDKIndex1m(), 1,
|
||||
// new long[] {magRecord.getDKIndex1m().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getHCount() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KH_Count,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getHCount(), 1,
|
||||
// new long[] {magRecord.getHCount().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDCount() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.KD_Count,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getDCount(), 1,
|
||||
// new long[] {magRecord.getDCount().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getHDev() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KH_Dev,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getHDev(), 1,
|
||||
// new long[] {magRecord.getHDev().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if ( magRecord.getDDev() != null ) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.KD_Dev,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getDDev(), 1,
|
||||
// new long[] {magRecord.getDDev().length});
|
||||
// storageRecord.setCorrelationObject(magRecord);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if (magRecord.getKs() != null) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.K_s,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getKs(), 1,
|
||||
// new long[] {magRecord.getKs().length});
|
||||
// storageRecord.setCorrelationObject(record);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if (magRecord.getAest() != null) {
|
||||
// AbstractStorageRecord storageRecord = new IntegerDataRecord(GeoMagRecord.A_est,
|
||||
// magRecord.getDataURI(), (int[]) magRecord.getAest(), 1,
|
||||
// new long[] {magRecord.getAest().length});
|
||||
// storageRecord.setCorrelationObject(record);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
//
|
||||
// if (magRecord.getLastUpdate() != null) {
|
||||
// AbstractStorageRecord storageRecord = new FloatDataRecord(GeoMagRecord.Last_Update_1m,
|
||||
// magRecord.getDataURI(), (float[]) magRecord.getLastUpdate(), 1,
|
||||
// new long[] {magRecord.getLastUpdate().length});
|
||||
// storageRecord.setCorrelationObject(record);
|
||||
// dataStore.addDataRecord(storageRecord);
|
||||
//
|
||||
// }
|
||||
|
||||
return dataStore;
|
||||
}
|
||||
|
||||
|
||||
    /*
     * Get GeoMagAvg from ID
     */
    public GeoMagAvg getGeoMagAvg(Date avgTime) {
        return (GeoMagAvg) avgDao.queryById(avgTime);
    }

    // public Integer getGeoMagSourceId(String sourceName) throws DataAccessLayerException {
    // return avgDao.getSourceId(sourceName);
    // }

    public GeoMagAvgDao getGeoMagAvgDao() {
        return avgDao;
    }

    public void setGeoMagAvgDao(GeoMagAvgDao avgDao) {
        this.avgDao = avgDao;
    }

    /*
     * Get GeoMagK1min from ID
     */
    public GeoMagK1min getGeoMagDateK1min(int id) {
        return (GeoMagK1min) k1minDao.queryById(id);
    }

    public GeoMagK1minDao getGeoMagK1minDao() {
        return k1minDao;
    }

    public void setGeoMagK1minDao(GeoMagK1minDao k1minDao) {
        this.k1minDao = k1minDao;
    }

    /*
     * Get GeoMagK3hr from ID
     */
    public GeoMagK3hr getGeoMagDateK3hr(int id) {
        return (GeoMagK3hr) k3hrDao.queryById(id);
    }

    public GeoMagK3hrDao getGeoMagK3hrDao() {
        return k3hrDao;
    }

    public void setGeoMagK3hrDao(GeoMagK3hrDao k3hrDao) {
        this.k3hrDao = k3hrDao;
    }
}
|
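An illustrative sketch (not part of this changeset): the purgeDataByRefTime() override above runs the primary purge first and then best-effort purges the three derived tables, logging rather than rethrowing any secondary failure so the primary result is never lost. A minimal, dependency-free mock of that control flow; all names here are hypothetical.

import java.util.Date;

public class CascadingPurgeSketch {
    interface SecondaryPurger {
        void purgeDataByRefTime(Date refTime) throws Exception;
    }

    // Same shape as GeoMagDao.purgeDataByRefTime(): the primary purge count is
    // returned unchanged; secondary failures are logged and swallowed.
    static int purge(Date refTime, int primaryCount, SecondaryPurger... secondaries) {
        for (SecondaryPurger p : secondaries) {
            try {
                p.purgeDataByRefTime(refTime);
            } catch (Exception e) {
                System.err.println("Purging a secondary table failed: " + e.getMessage());
            }
        }
        return primaryCount;
    }

    public static void main(String[] args) {
        Date refTime = new Date();
        int purged = purge(refTime, 42,
                t -> { /* geomag_houravg purge would run here */ },
                t -> { throw new IllegalStateException("simulated failure"); },
                t -> { /* geomag_k3hr purge would run here */ });
        System.out.println("primary rows purged: " + purged);
    }
}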
|
@ -1,8 +1,7 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.dao;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK1min;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
|
@ -14,19 +13,21 @@ import org.springframework.orm.hibernate3.HibernateTemplate;
|
|||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK1min;
|
||||
|
||||
import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||
import com.raytheon.uf.edex.database.dao.CoreDao;
|
||||
import com.raytheon.uf.edex.database.dao.DaoConfig;
|
||||
import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
||||
|
||||
/**
|
||||
* Record implementation for geomag k1minDao.
|
||||
* Record implementation for geomag k1minDao.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ---------------- --------------------------
|
||||
* 08/14/2013 T989 qzhou Initial creation.
|
||||
* 03/03/2014 #1110 qzhou Added method getRangeK1min(), Cleaned code
|
||||
* 03/13/2014 sgurung Added method purgeDataByRefTime()
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
|
@ -34,43 +35,68 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
|
|||
*/
|
||||
|
||||
public class GeoMagK1minDao extends CoreDao {

    /**
     * Creates a new GeoMagK1minDao
     */
    public GeoMagK1minDao() {
        super(DaoConfig.forClass(GeoMagK1min.class));
    }
|
||||
|
||||
    /**
     * Retrieves a GeoMagK1min based on the given id
     *
     * @param id
     *            The given ID number
     * @return The GeoMagK1min
     */
    public GeoMagK1min queryById(int id) {
        return (GeoMagK1min) super.queryById(id);
    }
|
||||
|
||||
public int getAreaId (int id){
|
||||
return queryById(id).getId();
|
||||
/**
|
||||
* Retrieves data from postGres
|
||||
*
|
||||
* @return Criteria list
|
||||
*/
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<GeoMagK1min> getRangeK1min(final String stationCode,
|
||||
final Date start, final Date end) {
|
||||
return (List<GeoMagK1min>) txTemplate
|
||||
.execute(new TransactionCallback() {
|
||||
@Override
|
||||
public Object doInTransaction(TransactionStatus status) {
|
||||
HibernateTemplate ht = getHibernateTemplate();
|
||||
Session sess = ht.getSessionFactory()
|
||||
.getCurrentSession();
|
||||
Criteria crit = sess.createCriteria(GeoMagK1min.class);
|
||||
Criterion where1 = Restrictions.eq("stationCode",
|
||||
stationCode);
|
||||
crit.add(where1);
|
||||
Criterion where2 = Restrictions.ge("refTime", start);
|
||||
crit.add(where2);
|
||||
Criterion where3 = Restrictions.lt("refTime", end);
|
||||
crit.add(where3);
|
||||
|
||||
return crit.list();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
    @SuppressWarnings("unchecked")
    public List<GeoMagK1min> getSingleK1min(final String stationCode,
            final Date date) {

        return (List<GeoMagK1min>) txTemplate
                .execute(new TransactionCallback() {
                    @Override
                    public Object doInTransaction(TransactionStatus status) {
                        HibernateTemplate ht = getHibernateTemplate();
                        Session sess = ht.getSessionFactory()
                                .getCurrentSession();
                        Criteria crit = sess.createCriteria(GeoMagK1min.class);
                        Criterion where1 = Restrictions.eq("stationCode",
                                stationCode);
                        crit.add(where1);
                        Criterion where2 = Restrictions.eq("refTime", date);
                        crit.add(where2);
                        return crit.list();
                    }
                });
    }
|
||||
|
||||
public int purgeDataByRefTime(Date refTime) throws DataAccessLayerException {
|
||||
DatabaseQuery deleteStmt = new DatabaseQuery(this.daoClass);
|
||||
deleteStmt.addQueryParam("refTime", refTime);
|
||||
return this.deleteByCriteria(deleteStmt);
|
||||
}
|
||||
}
|
||||
|
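An illustrative sketch (not part of this changeset): getRangeK1min() above selects refTime in the half-open window [start, end), greater-or-equal on the start bound and strictly-less on the end bound, so adjacent windows never double-count a record. A small standalone check of that predicate; the class and method names are hypothetical.

import java.util.Date;

public class K1minWindowSketch {
    // Same bounds semantics as the Criteria query:
    // Restrictions.ge("refTime", start) combined with Restrictions.lt("refTime", end).
    static boolean inWindow(Date refTime, Date start, Date end) {
        return !refTime.before(start) && refTime.before(end);
    }

    public static void main(String[] args) {
        long hour = 3600L * 1000L;
        Date start = new Date(0L);
        Date end = new Date(3 * hour);
        System.out.println(inWindow(new Date(0L), start, end));       // true: start is included
        System.out.println(inWindow(new Date(3 * hour), start, end)); // false: end is excluded
    }
}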
|
|
@ -1,5 +1,6 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.dao;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK3hr;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
@ -12,19 +13,20 @@ import org.springframework.orm.hibernate3.HibernateTemplate;
|
|||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK3hr;
|
||||
|
||||
import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||
import com.raytheon.uf.edex.database.dao.CoreDao;
|
||||
import com.raytheon.uf.edex.database.dao.DaoConfig;
|
||||
import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
||||
|
||||
/**
|
||||
* Record implementation for geomag k3hrDao.
|
||||
* Record implementation for geomag k3hrDao.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ---------------- --------------------------
|
||||
* 08/14/2013 T989 qzhou Initial creation.
|
||||
* 03/13/2014 sgurung Added method purgeDataByRefTime()
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
|
@ -32,30 +34,21 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
|
|||
*/
|
||||
|
||||
public class GeoMagK3hrDao extends CoreDao {

    /**
     * Creates a new GeoMagK3hrDao
     */
    public GeoMagK3hrDao() {
        super(DaoConfig.forClass(GeoMagK3hr.class));
    }

    /**
     * Retrieves a GeoMagK3hr based on the given id
     *
     * @param id
     *            The given ID number
     * @return The GeoMagK3hr
     */
    public GeoMagK3hr queryById(int id) {
        return (GeoMagK3hr) super.queryById(id);
    }
|
||||
|
||||
public int getId (int id){
|
||||
return queryById(id).getId();
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves data from postGres
|
||||
*
|
||||
* @return Criteria list
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<GeoMagK3hr> getRangeK3hr(final String stationCode, final Date start, final Date end) {
|
||||
public List<GeoMagK3hr> getRangeK3hr(final String stationCode,
|
||||
final Date start, final Date end) {
|
||||
return (List<GeoMagK3hr>) txTemplate.execute(new TransactionCallback() {
|
||||
@Override
|
||||
public Object doInTransaction(TransactionStatus status) {
|
||||
|
@ -64,10 +57,10 @@ public class GeoMagK3hrDao extends CoreDao {
|
|||
Criteria crit = sess.createCriteria(GeoMagK3hr.class);
|
||||
Criterion where1 = Restrictions.eq("stationCode", stationCode);
|
||||
crit.add(where1);
|
||||
                Criterion where2 = Restrictions.gt("refTime", start);
                crit.add(where2);
                Criterion where3 = Restrictions.lt("refTime", end);
                crit.add(where3);
|
||||
|
||||
return crit.list();
|
||||
}
|
||||
|
@ -75,7 +68,8 @@ public class GeoMagK3hrDao extends CoreDao {
|
|||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<GeoMagK3hr> getSingleK3hr(final String stationCode, final Date time) {
|
||||
public List<GeoMagK3hr> getSingleK3hr(final String stationCode,
|
||||
final Date time) {
|
||||
return (List<GeoMagK3hr>) txTemplate.execute(new TransactionCallback() {
|
||||
@Override
|
||||
public Object doInTransaction(TransactionStatus status) {
|
||||
|
@ -90,5 +84,10 @@ public class GeoMagK3hrDao extends CoreDao {
|
|||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public int purgeDataByRefTime(Date refTime) throws DataAccessLayerException {
|
||||
DatabaseQuery deleteStmt = new DatabaseQuery(this.daoClass);
|
||||
deleteStmt.addQueryParam("refTime", refTime);
|
||||
return this.deleteByCriteria(deleteStmt);
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,316 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.request;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagAvg;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK1min;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagK3hr;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.GeoMagRecord;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.calculation.CalcUtil;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.dao.GeoMagAvgDao;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.dao.GeoMagDao;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.dao.GeoMagK1minDao;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.geomag.dao.GeoMagK3hrDao;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Calendar;
|
||||
import java.util.Comparator;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import com.raytheon.uf.common.dataquery.db.QueryParam;
|
||||
import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||
import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
||||
|
||||
/**
|
||||
*
|
||||
* Retrieve data from database utility for a given dataURI
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 2014/02/12 #1123 qzhou Moved from edex to here
|
||||
* 2014/06/27 #1136 qzhou Change hour avg to 0-current time
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1.0
|
||||
*/
|
||||
public class DatabaseUtil {
|
||||
|
||||
private static final int AVG_DATA_RANGE = 30;
|
||||
|
||||
private static final float MISSING_VAL = 99999.99f;
|
||||
|
||||
/*
|
||||
* from geomag
|
||||
*/
|
||||
public static List<?> retrieveUriForAvg(GeoMagDao dao, String dataUri,
|
||||
Date time) {
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName());
|
||||
query.addReturnedField("component_1");
|
||||
query.addReturnedField("component_2");
|
||||
query.addReturnedField("dataTime.refTime");
|
||||
query.addReturnedField("badDataPoint");
|
||||
query.addReturnedField("sourceId");
|
||||
|
||||
// called only when time is 59min, so include it.
|
||||
query.addQueryParam("dataTime.refTime", time,
|
||||
QueryParam.QueryOperand.LESSTHANEQUALS);
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(time);
|
||||
cal.set(Calendar.MINUTE, 0);
|
||||
// cal.add(Calendar.HOUR_OF_DAY, -1);
|
||||
|
||||
query.addQueryParam("dataTime.refTime", cal.getTime(),
|
||||
QueryParam.QueryOperand.GREATERTHANEQUALS);
|
||||
query.addQueryParam("stationCode", station);
|
||||
|
||||
List<?> resultsList = null;
|
||||
|
||||
try {
|
||||
resultsList = dao.queryByCriteria(query); // 60
|
||||
} catch (DataAccessLayerException e1) {
|
||||
e1.printStackTrace();
|
||||
}
|
||||
|
||||
return resultsList;
|
||||
}
|
||||
|
||||
/*
|
||||
* from geomag_houravg
|
||||
*/
|
||||
public static List<GeoMagAvg> retrieveSingleAvg(String dataUri, Date time) {
|
||||
GeoMagAvgDao avgDao = new GeoMagAvgDao();
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
List<GeoMagAvg> resultsList = null;
|
||||
resultsList = avgDao.getSingleAvg(station, time);
|
||||
|
||||
return resultsList;
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* from geomag_houravg
|
||||
*/
|
||||
public static List<GeoMagAvg> retrieveUriBy3hr(String dataUri, Date spTime) {
|
||||
GeoMagAvgDao avgDao = new GeoMagAvgDao();
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(spTime);
|
||||
cal.add(Calendar.DAY_OF_YEAR, -AVG_DATA_RANGE); // at least one day is
|
||||
// needed for gt, lt
|
||||
|
||||
// since avg have min=30, cal.getTime() and spTime are not included
|
||||
List<GeoMagAvg> resultsList = null;
|
||||
resultsList = avgDao.getAvgForStation(station, cal.getTime(), spTime); // 720
|
||||
|
||||
return resultsList;
|
||||
}
|
||||
|
||||
/*
|
||||
* from geomag
|
||||
*/
|
||||
public static List<?> retrieveUriForK1min(GeoMagDao dao, String dataUri,
|
||||
Date time) {
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
DatabaseQuery query = new DatabaseQuery(GeoMagRecord.class.getName());
|
||||
|
||||
query.addReturnedField("component_1");
|
||||
query.addReturnedField("component_2");
|
||||
query.addReturnedField("dataTime.refTime");
|
||||
query.addReturnedField("badDataPoint");
|
||||
query.addReturnedField("sourceId");
|
||||
|
||||
// Document uses epTime-1minute. Consider 3 sources, we use current time
|
||||
query.addQueryParam("dataTime.refTime", time,
|
||||
QueryParam.QueryOperand.LESSTHANEQUALS);
|
||||
|
||||
Date epTime = CalcUtil.getEPTime(time);
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(epTime);
|
||||
cal.add(Calendar.HOUR_OF_DAY, -48);
|
||||
|
||||
// start time is epTime-48hour. So use GREATERTHANEQUALS
|
||||
query.addQueryParam("dataTime.refTime", cal.getTime(),
|
||||
QueryParam.QueryOperand.GREATERTHANEQUALS);
|
||||
query.addQueryParam("stationCode", station);
|
||||
|
||||
List<?> resultsList = null;
|
||||
try {
|
||||
resultsList = dao.queryByCriteria(query); // 2880
|
||||
} catch (DataAccessLayerException e1) {
|
||||
e1.printStackTrace();
|
||||
}
|
||||
|
||||
return resultsList;
|
||||
}
|
||||
|
||||
/*
|
||||
* from geomag_k1min
|
||||
*/
|
||||
public static List<GeoMagK1min> retrieveSingleK1min(String dataUri,
|
||||
Date time) {
|
||||
GeoMagK1minDao k1minDao = new GeoMagK1minDao();
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
List<GeoMagK1min> resultsList = null;
|
||||
resultsList = k1minDao.getSingleK1min(station, time);
|
||||
|
||||
return resultsList;
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* from geomag_k3hr
|
||||
*/
|
||||
public static List<GeoMagK3hr> retrieveUriForK3hr(String dataUri,
|
||||
Date epTime) {
|
||||
GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao();
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(epTime);
|
||||
cal.add(Calendar.DAY_OF_YEAR, -1);
|
||||
|
||||
List<GeoMagK3hr> resultsList = null;
|
||||
resultsList = k3hrDao.getRangeK3hr(station, cal.getTime(), epTime); // 1
|
||||
|
||||
return resultsList;
|
||||
}
|
||||
|
||||
/*
|
||||
* from geomag_k3hr
|
||||
*/
|
||||
public static List<GeoMagK3hr> retrieveSingleK3hr(String dataUri,
|
||||
Date epTime) {
|
||||
GeoMagK3hrDao k3hrDao = new GeoMagK3hrDao();
|
||||
String station = CalcUtil.getStationFromUri(dataUri);
|
||||
|
||||
List<GeoMagK3hr> resultsList = null;
|
||||
resultsList = k3hrDao.getSingleK3hr(station, epTime);
|
||||
|
||||
return resultsList;
|
||||
}
|
||||
|
||||
/*
|
||||
* sort n lists
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static void sort(List... lists) {
|
||||
assert lists.length > 0;
|
||||
|
||||
Object[][] objects = new Object[lists[0].size()][lists.length];
|
||||
|
||||
for (int i = 0; i < lists.length; i++) {
|
||||
int j = 0;
|
||||
for (Object object : lists[i]) {
|
||||
objects[j++][i] = object;
|
||||
}
|
||||
}
|
||||
|
||||
Arrays.sort(objects, new Comparator<Object[]>() {
|
||||
|
||||
public int compare(Object[] o1, Object[] o2) {
|
||||
return ((Comparable) o1[0]).compareTo(o2[0]);
|
||||
}
|
||||
});
|
||||
|
||||
for (int i = 0; i < lists.length; i++) {
|
||||
lists[i].clear();
|
||||
for (Object[] tuple : objects) {
|
||||
lists[i].add(tuple[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* fill time tag gaps, return fullBestList
|
||||
*/
|
||||
public static void fillHrAvgTimeGaps(List<GeoMagAvg> dataList,
|
||||
List<Date> dateListFinal, List<Float> hHrAvgListFinal,
|
||||
List<Float> dHrAvgListFinal, Date spTime) {
|
||||
List<Date> dateList = new ArrayList<Date>();
|
||||
List<Float> hHrAvgList = new ArrayList<Float>();
|
||||
List<Float> dHrAvgList = new ArrayList<Float>();
|
||||
|
||||
for (int i = 0; i < dataList.size(); i++) { // 1 extra
|
||||
|
||||
GeoMagAvg row = dataList.get(i);
|
||||
|
||||
dateList.add((Date) row.getAvgTime());
|
||||
hHrAvgList.add((Float) row.gethHrAvg());
|
||||
dHrAvgList.add((Float) row.getdHrAvg());
|
||||
|
||||
}
|
||||
|
||||
DatabaseUtil.sort(dateList, hHrAvgList, dHrAvgList);
|
||||
|
||||
/*
|
||||
* fill missing
|
||||
*/
|
||||
|
||||
// fill missing in the beginning
|
||||
Date date = (Date) dateList.get(0);
|
||||
int hr0 = date.getHours();
|
||||
|
||||
if (hr0 != spTime.getHours()) {
|
||||
for (int k = 0; k < hr0; k++) {
|
||||
|
||||
Date dateNew = (Date) date.clone();
|
||||
dateNew.setHours(k); // change setMinutes to setHours
|
||||
|
||||
dateListFinal.add(dateNew);
|
||||
hHrAvgListFinal.add(MISSING_VAL);
|
||||
dHrAvgListFinal.add(MISSING_VAL);
|
||||
}
|
||||
}
|
||||
|
||||
// fill missing in the middle
|
||||
for (int i = 0; i < dateList.size(); i++) {
|
||||
Date date0 = dateList.get(i);
|
||||
dateListFinal.add(date0); // change from data to data0
|
||||
hHrAvgListFinal.add(hHrAvgList.get(i));
|
||||
dHrAvgListFinal.add(dHrAvgList.get(i));
|
||||
|
||||
if (i + 1 < dateList.size()) {
|
||||
Date date1 = (Date) dateList.get(i + 1);
|
||||
int diffHr = (int) (date1.getTime() - date0.getTime())
|
||||
/ (3600 * 1000);
|
||||
|
||||
if (diffHr != 1) {
|
||||
for (int j = 0; j < diffHr - 1; j++) {
|
||||
dateListFinal.add(new Date(date0.getTime() + 3600
|
||||
* 1000 * (j + 1)));
|
||||
// append after i, i+1
|
||||
hHrAvgListFinal.add(MISSING_VAL);
|
||||
dHrAvgListFinal.add(MISSING_VAL);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// fill missing in the end // changed ending from 24 * AVG_DATA_RANGE to
|
||||
// 23(end of the day)
|
||||
int latest = dateListFinal.size();
|
||||
if (latest < 24 * AVG_DATA_RANGE) {
|
||||
for (int k = latest; k < 24 * AVG_DATA_RANGE; k++) {
|
||||
dateListFinal.add(new Date(dateListFinal.get(latest - 1)
|
||||
.getTime() + 3600 * 1000 * (k + 1)));
|
||||
hHrAvgListFinal.add(MISSING_VAL);
|
||||
dHrAvgListFinal.add(MISSING_VAL);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
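An illustrative sketch (not part of this changeset): DatabaseUtil.sort(...) above reorders several parallel lists by the first list's natural order while keeping the rows aligned, which is how the date list drives the order of the hHrAvg and dHrAvg lists. A compact, type-safe variant of the same idea for two lists; all names are hypothetical.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class ParallelSortSketch {
    // Sort 'keys' and 'values' together so values.get(i) stays paired with
    // keys.get(i), the same invariant DatabaseUtil.sort() preserves.
    static <K extends Comparable<K>, V> void sortByKey(List<K> keys, List<V> values) {
        Integer[] order = new Integer[keys.size()];
        for (int i = 0; i < order.length; i++) {
            order[i] = i;
        }
        Arrays.sort(order, Comparator.comparing(keys::get));
        List<K> k = new ArrayList<>(keys);
        List<V> v = new ArrayList<>(values);
        for (int i = 0; i < order.length; i++) {
            keys.set(i, k.get(order[i]));
            values.set(i, v.get(order[i]));
        }
    }

    public static void main(String[] args) {
        List<Integer> times = new ArrayList<>(Arrays.asList(3, 1, 2));
        List<String> avgs = new ArrayList<>(Arrays.asList("c", "a", "b"));
        sortByKey(times, avgs);
        System.out.println(times + " " + avgs); // [1, 2, 3] [a, b, c]
    }
}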
|
@ -0,0 +1,72 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.request;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
import com.raytheon.uf.common.serialization.comm.IServerRequest;
|
||||
|
||||
/**
|
||||
*
|
||||
* Request for a GeoMagAvg for the given dataURI
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 2014/02/12 #1110 qzhou Init
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1.0
|
||||
*/
|
||||
@DynamicSerialize
|
||||
public class RetrieveHrAvgRequest implements IServerRequest {
|
||||
|
||||
@DynamicSerializeElement
|
||||
private String stationCode;
|
||||
|
||||
@DynamicSerializeElement
|
||||
private Date startTime;
|
||||
|
||||
@DynamicSerializeElement
|
||||
private Date endTime;
|
||||
|
||||
public RetrieveHrAvgRequest() {
|
||||
}
|
||||
|
||||
public RetrieveHrAvgRequest(String stationCode, Date startTime, Date endTime) {
|
||||
super();
|
||||
this.stationCode = stationCode;
|
||||
this.startTime = startTime;
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
public String getStationCode() {
|
||||
return stationCode;
|
||||
}
|
||||
|
||||
public void setStationCode(String stationCode) {
|
||||
this.stationCode = stationCode;
|
||||
}
|
||||
|
||||
public Date getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public void setStartTime(Date startTime) {
|
||||
this.startTime = startTime;
|
||||
}
|
||||
|
||||
public Date getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public void setEndTime(Date endTime) {
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
}
|
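An illustrative sketch (not part of this changeset): constructing the new RetrieveHrAvgRequest for a one-day window. Sending it would normally go through the CAVE/EDEX request routing (a handler registered for this request type), which is outside this hunk, so only the construction is shown; the "BOU" station code is just an example value, and the sketch assumes the request class from this changeset is on the classpath.

import gov.noaa.nws.ncep.common.dataplugin.geomag.request.RetrieveHrAvgRequest;

import java.util.Date;

public class HrAvgRequestSketch {
    public static void main(String[] args) {
        Date end = new Date();
        Date start = new Date(end.getTime() - 24L * 3600L * 1000L); // previous 24 hours
        // Example station code; any configured geomag station would do.
        RetrieveHrAvgRequest request = new RetrieveHrAvgRequest("BOU", start, end);
        System.out.println(request.getStationCode() + " " + request.getStartTime()
                + " -> " + request.getEndTime());
    }
}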
|
@ -0,0 +1,72 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.geomag.request;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
import com.raytheon.uf.common.serialization.comm.IServerRequest;
|
||||
|
||||
/**
|
||||
*
|
||||
* Request for a GeoMagk1min for the given dataURI
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* 2014/02/12 #1110 qzhou Init
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author qzhou
|
||||
* @version 1.0
|
||||
*/
|
||||
@DynamicSerialize
|
||||
public class RetrieveK1minRequest implements IServerRequest {
|
||||
|
||||
@DynamicSerializeElement
|
||||
private String stationCode;
|
||||
|
||||
@DynamicSerializeElement
|
||||
private Date startTime;
|
||||
|
||||
@DynamicSerializeElement
|
||||
private Date endTime;
|
||||
|
||||
public RetrieveK1minRequest() {
|
||||
}
|
||||
|
||||
public RetrieveK1minRequest(String stationCode, Date startTime, Date endTime) {
|
||||
super();
|
||||
this.stationCode = stationCode;
|
||||
this.startTime = startTime;
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
public String getStationCode() {
|
||||
return stationCode;
|
||||
}
|
||||
|
||||
public void setStationCode(String stationCode) {
|
||||
this.stationCode = stationCode;
|
||||
}
|
||||
|
||||
public Date getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public void setStartTime(Date startTime) {
|
||||
this.startTime = startTime;
|
||||
}
|
||||
|
||||
public Date getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public void setEndTime(Date endTime) {
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,59 @@
package gov.noaa.nws.ncep.common.dataplugin.geomag.request;

import java.util.Date;

import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.raytheon.uf.common.serialization.comm.IServerRequest;

/**
 *
 * Request for a single GeoMagk1min record for the given station and reference time
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 2014/07/01   #1136      qzhou       Init
 *
 * </pre>
 *
 * @author qzhou
 * @version 1.0
 */
@DynamicSerialize
public class RetrieveSingleK1minRequest implements IServerRequest {

@DynamicSerializeElement
private String stationCode;

@DynamicSerializeElement
private Date refTime;

public RetrieveSingleK1minRequest() {
}

public RetrieveSingleK1minRequest(String stationCode, Date refTime) {
super();
this.stationCode = stationCode;
this.refTime = refTime;
}

public String getStationCode() {
return stationCode;
}

public void setStationCode(String stationCode) {
this.stationCode = stationCode;
}

public Date getRefTime() {
return refTime;
}

public void setRefTime(Date refTime) {
this.refTime = refTime;
}
}
|
|
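A minimal usage sketch for the request classes added above, assuming only the constructors shown in this commit; the station code is hypothetical and the request routing to EDEX is not part of this diff.

import java.util.Date;

import gov.noaa.nws.ncep.common.dataplugin.geomag.request.RetrieveK1minRequest;

public class K1minRequestExample {
    // Hypothetical helper (not part of this commit): build a one-minute K-index
    // request for the given station covering the last three hours. The request
    // would then be routed like any other IServerRequest; that plumbing is not
    // shown here.
    public static RetrieveK1minRequest lastThreeHours(String stationCode) {
        Date end = new Date();
        Date start = new Date(end.getTime() - 3L * 60L * 60L * 1000L);
        return new RetrieveK1minRequest(stationCode, start, end);
    }
}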
@ -14,6 +14,7 @@ Import-Package: com.raytheon.edex.uengine.tasks.query,
|
|||
com.raytheon.uf.common.pointdata,
|
||||
com.raytheon.uf.common.serialization.comm,
|
||||
com.raytheon.uf.edex.pointdata,
|
||||
gov.noaa.nws.ncep.edex.common.sounding,
|
||||
org.apache.commons.logging
|
||||
Export-Package: gov.noaa.nws.ncep.common.dataplugin.gpd,
|
||||
gov.noaa.nws.ncep.common.dataplugin.gpd.dao,
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
/**
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
|
@ -30,7 +34,10 @@ public class GenericPointDataConstants {
|
|||
// db filed defined in GenericPointDataRecord
|
||||
public static final String DB_PROD_NAME= "productInfo.name";
|
||||
public static final String DB_MASTER_LEVEL_NAME= "productInfo.masterLevel.name";
|
||||
public static final String DB_REFTIME_NAME= "dataTime.refTime";
|
||||
public static final String DB_REF_TIME= "dataTime.refTime";
|
||||
public static final String DB_FORECAST_TIME= "dataTime.fcstTime";
|
||||
public static final String DB_RANGESTART_TIME= "dataTime.validPeriod.start";
|
||||
public static final String DB_UTILITY_FLAGS= "dataTime.utilityFlags";
|
||||
public static final String DB_PRODUCT_VERSION = "productVersion";
|
||||
public static final String DB_STN_CATALOGTYPE = "location.catalogType";
|
||||
public static final String DB_SLAT = "slat";
|
||||
|
|
|
@ -1,3 +1,24 @@
|
|||
/**
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------- ------- -------- -----------
|
||||
* 05/30/2013 Chin J. Chen Initial coding
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author Chin J. Chen
|
||||
* @version 1.0
|
||||
*/
|
||||
package gov.noaa.nws.ncep.common.dataplugin.gpd;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.gpd.product.GenericPointDataProductInfo;
|
||||
|
@ -31,23 +52,6 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
|||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
import com.raytheon.uf.common.time.DataTime;
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
* <pre>
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------- ------- -------- -----------
|
||||
* 05/30/2013 Chin J. Chen Initial coding
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author Chin J. Chen
|
||||
* @version 1.0
|
||||
*/
|
||||
@Entity
|
||||
// @Table(name = "gpd", uniqueConstraints = { @UniqueConstraint(columnNames = {
|
||||
// "dataURI" }) })
|
||||
|
@ -144,6 +148,7 @@ public class GenericPointDataRecord extends PersistablePluginDataObject
|
|||
this.slat = slat;
|
||||
this.slon = slon;
|
||||
this.pointDataView = pointDataView;
|
||||
// this.pluginName = "gpd";
|
||||
// System.out.println("GenericPointDataRecord(3) entered");
|
||||
}
|
||||
|
||||
|
@ -157,6 +162,7 @@ public class GenericPointDataRecord extends PersistablePluginDataObject
|
|||
this.pointDataView = pointDataView;
|
||||
this.dataTime = dataTime;
|
||||
this.productVersion = productVersion;
|
||||
// this.pluginName = "gpd";
|
||||
// System.out.println("GenericPointDataRecord(4) entered");
|
||||
}
|
||||
|
||||
|
@ -300,13 +306,10 @@ public class GenericPointDataRecord extends PersistablePluginDataObject
|
|||
this.productVersion = productVersion;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see com.raytheon.uf.common.dataplugin.PluginDataObject#getPluginName()
|
||||
*/
|
||||
@Override
|
||||
public String getPluginName() {
|
||||
return "gpd";
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
/**
|
||||
*
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
|
@ -26,8 +29,13 @@ import gov.noaa.nws.ncep.common.dataplugin.gpd.product.GenericPointDataProductIn
|
|||
import gov.noaa.nws.ncep.common.dataplugin.gpd.product.GenericPointDataStationProduct;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.gpd.query.GenericPointDataQuery;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.gpd.query.GenericPointDataReqMsg.GenericPointDataQueryKey;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.gpd.query.GenericPointDataReqMsg.GenericPointDataReqType;
|
||||
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingStnInfo;
|
||||
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingStnInfoCollection;
|
||||
import gov.noaa.nws.ncep.edex.common.sounding.NcSoundingTimeLines;
|
||||
|
||||
import java.io.File;
|
||||
import java.sql.Timestamp;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
|
@ -36,6 +44,8 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.xml.bind.JAXBException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.hibernate.Criteria;
|
||||
|
@ -47,6 +57,7 @@ import org.hibernate.criterion.Restrictions;
|
|||
import com.raytheon.uf.common.dataplugin.PluginDataObject;
|
||||
import com.raytheon.uf.common.dataplugin.PluginException;
|
||||
import com.raytheon.uf.common.dataplugin.level.MasterLevel;
|
||||
import com.raytheon.uf.common.datastorage.StorageException;
|
||||
import com.raytheon.uf.common.parameter.Parameter;
|
||||
import com.raytheon.uf.common.pointdata.PointDataContainer;
|
||||
import com.raytheon.uf.common.pointdata.PointDataDescription;
|
||||
|
@ -75,7 +86,7 @@ public class GenericPointDataDao extends
|
|||
|
||||
@Override
|
||||
public String[] getKeysRequiredForFileName() {
|
||||
return new String[] { GenericPointDataConstants.DB_REFTIME_NAME,
|
||||
return new String[] { GenericPointDataConstants.DB_REF_TIME,
|
||||
GenericPointDataConstants.DB_PROD_NAME,
|
||||
GenericPointDataConstants.DB_MASTER_LEVEL_NAME };
|
||||
}
|
||||
|
@ -96,7 +107,9 @@ public class GenericPointDataDao extends
|
|||
@Override
|
||||
public String getPointDataFileName(GenericPointDataRecord p) {
|
||||
Date refTime = (p.getDataTime().getRefTime());
|
||||
String dateStr = hdfFileDateFormat.format(refTime);
|
||||
int forecasttime = p.getDataTime().getFcstTime();
|
||||
String dateStr = hdfFileDateFormat.format(refTime) + "-f"
|
||||
+ forecasttime;
|
||||
// System.out.println("gpd getPointDataFileName1 called and return: "+"gpd-"+p.getProductInfo().getName()+dateStr+/*"-"+p.getReportType().getMasterLevel().getName()+*/".h5");
|
||||
return "gpd-" + p.getProductInfo().getName() + dateStr + ".h5";
|
||||
}
|
||||
|
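The hunk above folds the forecast time into the HDF5 file name, so records that differ only in forecast hour no longer share a file. A rough sketch of the resulting name, using a hypothetical product name and date pattern (the real pattern is the hdfFileDateFormat field of this DAO):

import java.text.SimpleDateFormat;
import java.util.Date;

public class GpdFileNameExample {
    // Assumed date pattern for illustration only; the real one comes from
    // hdfFileDateFormat in GenericPointDataDao.
    private static final SimpleDateFormat FMT = new SimpleDateFormat("-yyyy-MM-dd-HH");

    // Mirrors the concatenation in getPointDataFileName(): product name, the
    // formatted reference time, then "-f" plus the forecast time in seconds.
    public static String fileName(String productName, Date refTime, int fcstSeconds) {
        return "gpd-" + productName + FMT.format(refTime) + "-f" + fcstSeconds + ".h5";
    }
}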
@ -123,10 +136,12 @@ public class GenericPointDataDao extends
|
|||
public String getPointDataFileName(Map<String, Object> dbResults) {
|
||||
String reportname = (String) dbResults
|
||||
.get(GenericPointDataConstants.DB_PROD_NAME);
|
||||
// String lmName=
|
||||
// (String)dbResults.get(GenericPointDataConstants.DB_MASTER_LEVEL_NAME);
|
||||
int forecasttime = (Integer) dbResults
|
||||
.get(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
String dateStr = hdfFileDateFormat.format(dbResults
|
||||
.get(GenericPointDataConstants.DB_REFTIME_NAME));
|
||||
.get(GenericPointDataConstants.DB_REF_TIME))
|
||||
+ "-f"
|
||||
+ forecasttime;
|
||||
String filename = PLUGIN_HDF5_DIR + reportname + File.separator
|
||||
// + lmName + File.separator
|
||||
+ this.pluginName + "-" + reportname + /* "-"+lmName+ */dateStr
|
||||
|
@ -158,7 +173,10 @@ public class GenericPointDataDao extends
|
|||
|
||||
Date refTime = ((PluginDataObject) persistable).getDataTime()
|
||||
.getRefTime();
|
||||
String dateStr = hdfFileDateFormat.format(refTime);
|
||||
int forecasttime = ((PluginDataObject) persistable).getDataTime()
|
||||
.getFcstTime();
|
||||
String dateStr = hdfFileDateFormat.format(refTime) + "-f"
|
||||
+ forecasttime;
|
||||
String fileName = persistable.getPluginName() + "-"
|
||||
+ rec.getProductInfo().getName() + /*
|
||||
* "-"+rec.getReportType().
|
||||
|
@ -174,11 +192,17 @@ public class GenericPointDataDao extends
|
|||
/*
|
||||
* This function is for development testing.. not used in production code
|
||||
*/
|
||||
public PointDataDescription getPointDataDescription()
|
||||
throws SerializationException {
|
||||
public PointDataDescription getPointDataDescription() throws JAXBException {
|
||||
if (pdd == null) {
|
||||
pdd = PointDataDescription.fromStream(this.getClass()
|
||||
.getResourceAsStream("/res/pointdata/gpd.xml"));
|
||||
try {
|
||||
pdd = PointDataDescription.fromStream(this.getClass()
|
||||
.getResourceAsStream("/res/pointdata/gpd.xml"));
|
||||
} catch (SerializationException e) {
|
||||
// TODO Auto-generated catch block. Please revise as
|
||||
// appropriate.
|
||||
// statusHandler.handle(Priority.PROBLEM,
|
||||
// e.getLocalizedMessage(), e);
|
||||
}
|
||||
}
|
||||
return pdd;
|
||||
}
|
||||
|
@ -415,6 +439,7 @@ public class GenericPointDataDao extends
|
|||
|
||||
Criterion nameCrit = Restrictions.eq("name", prod.getName());
|
||||
crit.add(nameCrit);
|
||||
// query
|
||||
List<?> vals = crit.list();
|
||||
if (vals.size() > 0) {
|
||||
// the product is already in DB
|
||||
|
@ -440,7 +465,7 @@ public class GenericPointDataDao extends
|
|||
// prod version > 0, disallow update prod info
|
||||
} else {
|
||||
// TBD...do we need clone it?
|
||||
returnProdInfo = dbProdInfo;// .clone();
|
||||
returnProdInfo = dbProdInfo;
|
||||
}
|
||||
|
||||
} else if (createProd) {
|
||||
|
@ -630,6 +655,15 @@ public class GenericPointDataDao extends
|
|||
GenericPointDataQueryKey quertKey, String stnId, float slat,
|
||||
float slon, GenericPointDataProductInfo prodInfo, int productVersion)
|
||||
throws Exception {
|
||||
return (getPointDataContainer(refTime, null, quertKey, stnId, slat,
|
||||
slon, prodInfo, productVersion));
|
||||
}
|
||||
|
||||
private PointDataContainer getPointDataContainer(Date refTime,
|
||||
Date rangeStartTime, GenericPointDataQueryKey quertKey,
|
||||
String stnId, float slat, float slon,
|
||||
GenericPointDataProductInfo prodInfo, int productVersion)
|
||||
throws Exception {
|
||||
String prodName = prodInfo.getName();
|
||||
|
||||
PointDataContainer pdc = null;
|
||||
|
@ -648,15 +682,14 @@ public class GenericPointDataDao extends
|
|||
}
|
||||
returnParametersString.append(parameter);
|
||||
}
|
||||
// also add the 3 HDF5 mandatory datasets
|
||||
// also add the 3 HDF5 mandatory data sets
|
||||
returnParametersString.append(","
|
||||
+ GenericPointDataConstants.HDF5_LEVEL_VALUE);
|
||||
returnParametersString.append(","
|
||||
+ GenericPointDataConstants.HDF5_NUM_LEVEL);
|
||||
returnParametersString.append(","
|
||||
+ GenericPointDataConstants.HDF5_STN_ID);
|
||||
System.out.println("gpd dao hdf5 parameterlist="
|
||||
+ returnParametersString.toString());
|
||||
// System.out.println("gpd dao hdf5 parameterlist="+returnParametersString.toString());
|
||||
|
||||
// 2nd:: add return fields form DB. the parameter name need to be
|
||||
// defined in
|
||||
|
@ -666,7 +699,10 @@ public class GenericPointDataDao extends
|
|||
+ GenericPointDataConstants.DB_STN_CATALOGTYPE);
|
||||
returnParametersString.append("," + GenericPointDataConstants.DB_SLAT);
|
||||
returnParametersString.append("," + GenericPointDataConstants.DB_SLON);
|
||||
|
||||
returnParametersString.append(","
|
||||
+ GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
returnParametersString.append(","
|
||||
+ GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
// parameters defined in
|
||||
// /gov.noaa.nws.ncep.edex.plugin.gpd/utility/common_static/base/path/gpdPathKeys.xml
|
||||
// AND those returned by dao.getKeysRequiredForFileName()
|
||||
|
@ -681,25 +717,33 @@ public class GenericPointDataDao extends
|
|||
if (quertKey == GenericPointDataQueryKey.BY_STN_ID)
|
||||
pdq.addParameter("location.stationId", stnId, "=");
|
||||
else if (quertKey == GenericPointDataQueryKey.BY_SLAT_SLON) {
|
||||
pdq.addParameter("slat", Float.toString(slat + 0.001f), "<");
|
||||
pdq.addParameter("slon", Float.toString(slon + 0.001f), "<");
|
||||
pdq.addParameter("slat", Float.toString(slat - 0.001f), ">");
|
||||
pdq.addParameter("slon", Float.toString(slon - 0.001f), ">");
|
||||
pdq.addParameter("slat", Float.toString(slat), "=");
|
||||
pdq.addParameter("slon", Float.toString(slon), "=");
|
||||
|
||||
}
|
||||
|
||||
String dateStr = dbRefTimeFormat.format(refTime);
|
||||
pdq.addParameter("dataTime.refTime", dateStr, "=");
|
||||
pdq.addParameter("productVersion", Integer.toString(productVersion),
|
||||
"=");
|
||||
System.out.println("requestig refTime = " + dateStr);
|
||||
pdq.addParameter(GenericPointDataConstants.DB_REF_TIME, dateStr, "=");
|
||||
if (rangeStartTime != null) {
|
||||
String rangedateStr = dbRefTimeFormat.format(rangeStartTime);
|
||||
pdq.addParameter(GenericPointDataConstants.DB_RANGESTART_TIME,
|
||||
rangedateStr, "=");
|
||||
}
|
||||
pdq.addParameter(GenericPointDataConstants.DB_PRODUCT_VERSION,
|
||||
Integer.toString(productVersion), "=");
|
||||
// System.out.println("requestig refTime = "+ dateStr);
|
||||
pdq.requestAllLevels();
|
||||
pdc = pdq.execute();
|
||||
try {
|
||||
pdc = pdq.execute();
|
||||
} catch (StorageException e) {
|
||||
System.out.println("HDF5 query StorageException " + e);
|
||||
}
|
||||
|
||||
return pdc;
|
||||
}
|
||||
|
||||
/*
|
||||
* TBM...Chin delet this leter...not used public
|
||||
* TBM...Chin delete this later...not used public
|
||||
* GenericPointDataProductContainer getGpdProduct(Date refTime, String
|
||||
* prodName,boolean useSpecifiedProductVersion, int productVersion)throws
|
||||
* Exception{ GenericPointDataProductInfo prodInfo = getGpdProdInfo(
|
||||
|
@ -791,6 +835,7 @@ public class GenericPointDataDao extends
|
|||
GenericPointDataQueryKey key, String stnId, float slat, float slon,
|
||||
String prodName, boolean useSpecifiedProductVersion,
|
||||
int productVersion) throws Exception {
|
||||
long t01 = System.currentTimeMillis();
|
||||
GenericPointDataProductInfo prodInfo = getGpdProdInfo(prodName);
|
||||
if (prodInfo == null) {
|
||||
System.out.println("report is not in DB");
|
||||
|
@ -809,7 +854,7 @@ public class GenericPointDataDao extends
|
|||
System.out.println("pdc is null");
|
||||
return null;
|
||||
}
|
||||
System.out.println("pdc CurrentSz()=" + pdc.getCurrentSz());
|
||||
// System.out.println("pdc CurrentSz()="+pdc.getCurrentSz());
|
||||
GenericPointDataProductContainer prodCon = new GenericPointDataProductContainer();
|
||||
prodCon.setProductInfo(prodInfo);
|
||||
prodCon.setRefTime(refTime);
|
||||
|
@ -891,6 +936,19 @@ public class GenericPointDataDao extends
|
|||
// slon value is retrieved already, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.DB_SLON);
|
||||
}
|
||||
String utFlag = null;
|
||||
if (parameters.contains(GenericPointDataConstants.DB_UTILITY_FLAGS)) {
|
||||
utFlag = pdv
|
||||
.getString(GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
// System.out.println("utFlag= "+ utFlag);
|
||||
parameters.remove(GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
}
|
||||
int forecastTime = 0;
|
||||
if (parameters.contains(GenericPointDataConstants.DB_FORECAST_TIME)) {
|
||||
forecastTime = pdv
|
||||
.getInt(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
parameters.remove(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
}
|
||||
// PDV id is not returned back to user, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.HDF5_PDV_ID);
|
||||
|
||||
|
@ -904,6 +962,8 @@ public class GenericPointDataDao extends
|
|||
stnPd.setNumLevel(numLevel);
|
||||
stnPd.setSlat(rtnslat);
|
||||
stnPd.setSlon(rtnslon);
|
||||
stnPd.setForecastTime(forecastTime);
|
||||
stnPd.setUtilityFlag(utFlag);
|
||||
for (String parm : parameters) {
|
||||
if (numLevel > 1) {
|
||||
// these parameters are data parameters and should be 2
|
||||
|
@ -920,6 +980,7 @@ public class GenericPointDataDao extends
|
|||
levelList.get(j).getGpdParameters().add(gpdParm);
|
||||
}
|
||||
} else {
|
||||
// System.out.println("parm ="+parm);
|
||||
GenericPointDataParameter gpdParm = new GenericPointDataParameter(
|
||||
parm, pdv.getFloat(parm));
|
||||
levelList.get(0).getGpdParameters().add(gpdParm);
|
||||
|
@ -927,6 +988,10 @@ public class GenericPointDataDao extends
|
|||
}
|
||||
prodCon.getStnProdLst().add(stnPd);
|
||||
}
|
||||
|
||||
long t02 = System.currentTimeMillis();
|
||||
System.out.println("ThrifClient: getGpdProduct() took " + (t02 - t01)
|
||||
+ " ms in total for query stn=" + stnId);
|
||||
return prodCon;
|
||||
}
|
||||
|
||||
|
@ -952,8 +1017,8 @@ public class GenericPointDataDao extends
|
|||
System.out.println("pdc is null");
|
||||
continue;
|
||||
}
|
||||
System.out.println(refTime.toString() + " pdc CurrentSz()="
|
||||
+ pdc.getCurrentSz());
|
||||
// System.out.println(refTime.toString()
|
||||
// +" pdc CurrentSz()="+pdc.getCurrentSz());
|
||||
|
||||
for (int i = 0; i < pdc.getCurrentSz(); i++) {
|
||||
PointDataView pdv = pdc.readRandom(i);
|
||||
|
@ -1036,6 +1101,23 @@ public class GenericPointDataDao extends
|
|||
// slon value is retrieved already, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.DB_SLON);
|
||||
}
|
||||
String utFlag = null;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.DB_UTILITY_FLAGS)) {
|
||||
utFlag = pdv
|
||||
.getString(GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
System.out.println("utFlag= " + utFlag);
|
||||
parameters
|
||||
.remove(GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
}
|
||||
int forecastTime = 0;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.DB_FORECAST_TIME)) {
|
||||
forecastTime = pdv
|
||||
.getInt(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
parameters
|
||||
.remove(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
}
|
||||
// PDV id is not returned back to user, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.HDF5_PDV_ID);
|
||||
|
||||
|
@ -1049,6 +1131,8 @@ public class GenericPointDataDao extends
|
|||
stnPd.setNumLevel(numLevel);
|
||||
stnPd.setSlat(rtnslat);
|
||||
stnPd.setSlon(rtnslon);
|
||||
stnPd.setForecastTime(forecastTime);
|
||||
stnPd.setUtilityFlag(utFlag);
|
||||
for (String parm : parameters) {
|
||||
if (numLevel > 1) {
|
||||
// these parameters are data parameters and should be 2
|
||||
|
@ -1076,8 +1160,182 @@ public class GenericPointDataDao extends
|
|||
return stnProdList;
|
||||
}
|
||||
|
||||
public List<GenericPointDataStationProduct> getGpdStationModelSndProduct(
|
||||
List<Date> rangeStartTimeList, Date referenceTime,
|
||||
GenericPointDataQueryKey key, String stnId, float slat, float slon,
|
||||
String prodName) throws Exception {
|
||||
long t01 = System.currentTimeMillis();
|
||||
GenericPointDataProductInfo prodInfo = getGpdProdInfo(prodName);
|
||||
if (prodInfo == null) {
|
||||
System.out.println("product is not in DB");
|
||||
return null;
|
||||
}
|
||||
int productVersion = getGpdProductLatestVersion(referenceTime, prodName);
|
||||
if (productVersion < 0) {
|
||||
System.out.println("product version not available");
|
||||
return null;
|
||||
}
|
||||
|
||||
List<GenericPointDataStationProduct> stnProdList = new ArrayList<GenericPointDataStationProduct>();
|
||||
for (Date rangeStartTime : rangeStartTimeList) {
|
||||
|
||||
PointDataContainer pdc = getPointDataContainer(referenceTime,
|
||||
rangeStartTime, key, stnId, slat, slon, prodInfo,
|
||||
productVersion);
|
||||
if (pdc == null) {
|
||||
System.out.println("pdc is null");
|
||||
continue;
|
||||
}
|
||||
// System.out.println(rangeStartTime.toString()
|
||||
// +" pdc CurrentSz()="+pdc.getCurrentSz());
|
||||
|
||||
for (int i = 0; i < pdc.getCurrentSz(); i++) {
|
||||
PointDataView pdv = pdc.readRandom(i);
|
||||
// System.out.println("pdv#"+i+" *********************************************");
|
||||
Set<String> parameters = new HashSet<String>(pdv.getContainer()
|
||||
.getParameters());
|
||||
int numLevel = 0;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)) {
|
||||
numLevel = pdv
|
||||
.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL);
|
||||
// System.out.println("numLevel= "+ numLevel);
|
||||
// numLevel value is retrieved already, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL);
|
||||
} else
|
||||
continue; // level number is 0, no need to continue on this
|
||||
// PDV.
|
||||
|
||||
List<GenericPointDataLevel> levelList;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)) {
|
||||
levelList = new ArrayList<GenericPointDataLevel>(numLevel);
|
||||
if (numLevel > 1) {
|
||||
Number[] num = pdv
|
||||
.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);// pdv.getNumberAllLevels(parm,numLevel);
|
||||
int count = 0;
|
||||
for (Number n : num) {
|
||||
count++;
|
||||
if (count > numLevel)
|
||||
break;
|
||||
// System.out.println("Level " +count+
|
||||
// " value="+n.floatValue());
|
||||
GenericPointDataLevel gpdLevel = new GenericPointDataLevel();
|
||||
gpdLevel.setLevelValue(n.floatValue());
|
||||
levelList.add(gpdLevel);
|
||||
}
|
||||
} else {
|
||||
GenericPointDataLevel gpdLevel = new GenericPointDataLevel();
|
||||
gpdLevel.setLevelValue(pdv
|
||||
.getFloat(GenericPointDataConstants.HDF5_LEVEL_VALUE));
|
||||
levelList.add(gpdLevel);
|
||||
}
|
||||
// level value is retrieved already, so drop it here
|
||||
parameters
|
||||
.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE);
|
||||
} else
|
||||
continue; // no level value, no need to continue on this
|
||||
// PDV.
|
||||
|
||||
int stnCatalogType = ObStation.CAT_TYPE_MESONET;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)) {
|
||||
stnCatalogType = pdv
|
||||
.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE);
|
||||
// System.out.println("stnCatalogType= "+ stnCatalogType);
|
||||
// DB_STN_CATALOGTYPE value is retrieved already, so drop it
|
||||
// here
|
||||
parameters
|
||||
.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE);
|
||||
}
|
||||
String rtnstnId = stnId;
|
||||
if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)) {
|
||||
rtnstnId = pdv
|
||||
.getString(GenericPointDataConstants.HDF5_STN_ID);
|
||||
// System.out.println("stnId= "+ rtnstnId);
|
||||
// stnId is input parameter, can drop it here.
|
||||
parameters.remove(GenericPointDataConstants.HDF5_STN_ID);
|
||||
}
|
||||
float rtnslat = slat;
|
||||
if (parameters.contains(GenericPointDataConstants.DB_SLAT)) {
|
||||
rtnslat = pdv.getFloat(GenericPointDataConstants.DB_SLAT);
|
||||
// System.out.println("slat= "+ rtnslat);
|
||||
// slat value is retrieved already, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.DB_SLAT);
|
||||
}
|
||||
float rtnslon = slon;
|
||||
if (parameters.contains(GenericPointDataConstants.DB_SLON)) {
|
||||
rtnslon = pdv.getFloat(GenericPointDataConstants.DB_SLON);
|
||||
// System.out.println("slon= "+ rtnslon);
|
||||
// slon value is retrieved already, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.DB_SLON);
|
||||
}
|
||||
String utFlag = null;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.DB_UTILITY_FLAGS)) {
|
||||
utFlag = pdv
|
||||
.getString(GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
System.out.println("utFlag= " + utFlag);
|
||||
parameters
|
||||
.remove(GenericPointDataConstants.DB_UTILITY_FLAGS);
|
||||
}
|
||||
int forecastTime = 0;
|
||||
if (parameters
|
||||
.contains(GenericPointDataConstants.DB_FORECAST_TIME)) {
|
||||
forecastTime = pdv
|
||||
.getInt(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
parameters
|
||||
.remove(GenericPointDataConstants.DB_FORECAST_TIME);
|
||||
}
|
||||
// PDV id is not returned back to user, so drop it here
|
||||
parameters.remove(GenericPointDataConstants.HDF5_PDV_ID);
|
||||
|
||||
GenericPointDataStationProduct stnPd = new GenericPointDataStationProduct();
|
||||
stnPd.setProductName(prodName);
|
||||
stnPd.setRefTime(referenceTime);
|
||||
stnPd.setLevelLst(levelList);
|
||||
stnPd.setProductVersion(productVersion);
|
||||
stnPd.getLocation().setStationId(rtnstnId);
|
||||
stnPd.getLocation().setCatalogType(stnCatalogType);
|
||||
stnPd.setNumLevel(numLevel);
|
||||
stnPd.setSlat(rtnslat);
|
||||
stnPd.setSlon(rtnslon);
|
||||
stnPd.setForecastTime(forecastTime);
|
||||
stnPd.setUtilityFlag(utFlag);
|
||||
|
||||
for (String parm : parameters) {
|
||||
if (numLevel > 1) {
|
||||
// these parameters are data parameters and should be 2
|
||||
// dimensional float value per design
|
||||
// If a new "meta" data is queried, then we should take
|
||||
// care of that data specifically before here.
|
||||
Number[] num = pdv.getNumberAllLevels(parm);// ,numLevel);
|
||||
// System.out.println("parm ="+parm);
|
||||
for (int j = 0; j < numLevel; j++) {
|
||||
Number n = num[j];
|
||||
// System.out.println(" value="+n.floatValue());
|
||||
GenericPointDataParameter gpdParm = new GenericPointDataParameter(
|
||||
parm, n.floatValue());
|
||||
levelList.get(j).getGpdParameters().add(gpdParm);
|
||||
}
|
||||
} else {
|
||||
GenericPointDataParameter gpdParm = new GenericPointDataParameter(
|
||||
parm, pdv.getFloat(parm));
|
||||
levelList.get(0).getGpdParameters().add(gpdParm);
|
||||
}
|
||||
}
|
||||
stnProdList.add(stnPd);
|
||||
}
|
||||
}
|
||||
long t02 = System.currentTimeMillis();
|
||||
System.out.println("ThrifClient: getGpdStationModelSndProduct() took "
|
||||
+ (t02 - t01) + " ms in total for query stn=" + stnId);
|
||||
|
||||
return stnProdList;
|
||||
}
|
||||
|
||||
/*
|
||||
* TBM...Chin delet this leter...not used public
|
||||
* TBM...Chin delete this later...not used public
|
||||
* GenericPointDataStationProduct getGpdStationProduct(Date refTime,
|
||||
* GenericPointDataQueryKey key, String stnId, double slat, double slon,
|
||||
* String reportName,boolean useSpecifiedProductVersion, int
|
||||
|
@ -1291,8 +1549,9 @@ public class GenericPointDataDao extends
|
|||
GenericPointDataRecord rec = (GenericPointDataRecord) pdo;
|
||||
String directory = PLUGIN_HDF5_DIR
|
||||
+ rec.getProductInfo().getName();
|
||||
|
||||
String dateStr = hdfFileDateFormat.format(refTime);
|
||||
int forecasttime = rec.getDataTime().getFcstTime();
|
||||
String dateStr = hdfFileDateFormat.format(refTime)
|
||||
+ "-f" + forecasttime;
|
||||
String fileName = this.pluginName + "-"
|
||||
+ rec.getProductInfo().getName() + dateStr
|
||||
+ ".h5";
|
||||
|
@ -1343,4 +1602,123 @@ public class GenericPointDataDao extends
|
|||
return results;
|
||||
}
|
||||
|
||||
/*
|
||||
* Return distinct reference time lines for one product
|
||||
*/
|
||||
public NcSoundingTimeLines getGpdProductTimeline(String prodName) {
|
||||
Object[] synopTimeAry = null;
|
||||
NcSoundingTimeLines tl = new NcSoundingTimeLines();
|
||||
String queryStr;
|
||||
queryStr = new String(
|
||||
"Select Distinct reftime FROM gpd where productinfo_name='"
|
||||
+ prodName + "' ORDER BY reftime DESC");
|
||||
synopTimeAry = (Object[]) executeSQLQuery(queryStr);
|
||||
tl.setTimeLines(synopTimeAry);
|
||||
return tl;
|
||||
}
|
||||
|
||||
/*
|
||||
* Return distinct rangestart times for one product at one reference time.
|
||||
* Input reference time string format is "yyyy-mm-dd HH"
|
||||
*/
|
||||
public NcSoundingTimeLines getGpdProductRangestartTimes(String prodName,
|
||||
String refTimeStr) {
|
||||
|
||||
Object[] refTimeAry = null;
|
||||
NcSoundingTimeLines tl = new NcSoundingTimeLines();
|
||||
|
||||
String queryStr = new String(
|
||||
"Select Distinct rangestart FROM gpd where productinfo_name='"
|
||||
+ prodName + "' AND reftime='" + refTimeStr + ":00:00'"
|
||||
+ " ORDER BY rangestart");
|
||||
refTimeAry = (Object[]) executeSQLQuery(queryStr);
|
||||
tl.setTimeLines(refTimeAry);
|
||||
|
||||
return tl;
|
||||
}
|
||||
|
||||
/*
|
||||
* Return distinct station id(s) for one product at one reference time
|
||||
*/
|
||||
public NcSoundingStnInfoCollection getGpdStationInfoCollection(
|
||||
String selectedRefTime, String selectedRangeStartTime,
|
||||
String prodName) {
|
||||
NcSoundingStnInfoCollection stnInfoCol = new NcSoundingStnInfoCollection();
|
||||
List<NcSoundingStnInfo> stationInfoList = new ArrayList<NcSoundingStnInfo>();
|
||||
String queryStr;
|
||||
Object[] rtnobjArray;
|
||||
queryStr = new String(
|
||||
"Select Distinct slat, slon, id, location_gid, reftime, rangestart FROM gpd where reftime='"
|
||||
+ selectedRefTime
|
||||
+ "' AND rangestart='"
|
||||
+ selectedRangeStartTime
|
||||
+ "' AND productinfo_name='"
|
||||
+ prodName
|
||||
+ "' AND slat BETWEEN -89.9 AND 89.9 AND slon BETWEEN -179.9 AND 179.9");
|
||||
rtnobjArray = executeSQLQuery(queryStr);
|
||||
String stnId = "";
|
||||
Double slat, slon;
|
||||
Timestamp synoptictime = null, rsTime = null;
|
||||
for (int j = 0; j < rtnobjArray.length; j++) {
|
||||
Object[] objArray = (Object[]) rtnobjArray[j];
|
||||
// ids.add(((Integer)objArray[2]));
|
||||
// We save lat/lon as float in DB.
|
||||
// To make sure the double number get the same precision as the
|
||||
// float number saved in DB
|
||||
// we have to do the following conversion.
|
||||
slat = new Double(objArray[0].toString());
|
||||
slon = new Double(objArray[1].toString());
|
||||
stnId = (String) objArray[3];
|
||||
stnId = stnId.replace("1000-", "");
|
||||
synoptictime = (Timestamp) objArray[4];
|
||||
rsTime = (Timestamp) objArray[5];
|
||||
NcSoundingStnInfo stn = stnInfoCol.getNewStnInfo();
|
||||
stn.setStnId(stnId);
|
||||
stn.setStationLongitude(slon);
|
||||
stn.setStationLatitude(slat);
|
||||
stn.setSynopTime(synoptictime);
|
||||
stn.setRangeStartTime(rsTime);
|
||||
stationInfoList.add((NcSoundingStnInfo) stn);
|
||||
}
|
||||
NcSoundingStnInfo[] stationInfoAry = new NcSoundingStnInfo[stationInfoList
|
||||
.size()];
|
||||
stnInfoCol.setStationInfo(stationInfoList.toArray(stationInfoAry));
|
||||
// *System.out.println("stn size = "+
|
||||
// stnInfoCol.getStationInfo().length);
|
||||
return stnInfoCol;
|
||||
}
|
||||
|
||||
public Object[] getGpdAvailProducts(GenericPointDataReqType reqType) {
|
||||
String queryStr;
|
||||
Object[] rtnobjArray;
|
||||
switch (reqType) {
|
||||
case GET_GPD_AVAILABLE_OBSERVED_SOUNDING_PRODUCTS:
|
||||
queryStr = new String(
|
||||
"Select Distinct productinfo_name FROM gpd where productinfo_name IN (Select Distinct name FROM gpd_productinfo where maxnumberoflevel > 8) AND gpd.utilityflags = '[]'");
|
||||
break;
|
||||
case GET_GPD_AVAILABLE_MODEL_SOUNDING_PRODUCTS:
|
||||
queryStr = new String(
|
||||
"Select Distinct productinfo_name FROM gpd where productinfo_name IN (Select Distinct name FROM gpd_productinfo where maxnumberoflevel > 8) AND gpd.utilityflags = '[FCST_USED]'");
|
||||
break;
|
||||
case GET_GPD_AVAILABLE_SURFACE_PRODUCTS:
|
||||
queryStr = new String(
|
||||
"Select Distinct name FROM gpd_productinfo where maxnumberoflevel=1");
|
||||
break;
|
||||
case GET_GPD_ALL_AVAILABLE_PRODUCTS:
|
||||
queryStr = new String("Select Distinct name FROM gpd_productinfo");
|
||||
break;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
rtnobjArray = executeSQLQuery(queryStr);
|
||||
|
||||
// List<String> prodList = new ArrayList<String>();
|
||||
// for (int j =0; j <rtnobjArray.length; j++){
|
||||
// Object[] objArray = (Object[] )rtnobjArray[j];
|
||||
// System.out.println("prod="+rtnobjArray[j]);
|
||||
// String prodName= (String)rtnobjArray[j];
|
||||
// prodList.add(prodName);
|
||||
// }
|
||||
return rtnobjArray;
|
||||
}
|
||||
}
|
||||
|
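For reference, a sketch of the SQL generated by the new timeline helpers added above, expanded for a hypothetical product name and reference time; the table and column names are taken verbatim from the query strings in getGpdProductTimeline and getGpdProductRangestartTimes.

public class GpdTimelineSqlExample {
    public static void main(String[] args) {
        // Hypothetical values for illustration only.
        String prodName = "sfcobs";
        String refTimeStr = "2014-03-01 00"; // "yyyy-mm-dd HH", per the comment above

        // Distinct reference times for one product.
        String timelineSql = "Select Distinct reftime FROM gpd where productinfo_name='"
                + prodName + "' ORDER BY reftime DESC";
        // Distinct range-start times for one product at one reference time.
        String rangeStartSql = "Select Distinct rangestart FROM gpd where productinfo_name='"
                + prodName + "' AND reftime='" + refTimeStr + ":00:00'"
                + " ORDER BY rangestart";

        System.out.println(timelineSql);
        System.out.println(rangeStartSql);
    }
}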
|
|
@ -1,5 +1,9 @@
|
|||
/**
|
||||
*
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
/**
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
/**
|
||||
*
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
/**
|
||||
*
|
||||
*
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
* <pre>
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
/**
|
||||
*
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
*
|
||||
|
@ -19,6 +23,7 @@ package gov.noaa.nws.ncep.common.dataplugin.gpd.product;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.bind.annotation.XmlAccessType;
|
||||
|
@ -30,6 +35,7 @@ import javax.xml.bind.annotation.XmlRootElement;
|
|||
import com.raytheon.uf.common.pointdata.spatial.ObStation;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
|
||||
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
||||
import com.raytheon.uf.common.time.DataTime.FLAG;
|
||||
|
||||
@DynamicSerialize
|
||||
@XmlAccessorType(XmlAccessType.NONE)
|
||||
|
@ -65,6 +71,20 @@ public class GenericPointDataStationProduct {
|
|||
@XmlAttribute
|
||||
protected Date refTime;
|
||||
|
||||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
// Set to -1 if the forecast time is not used, for example for observed surface or observed sounding data.
// The forecast time is in seconds and is measured from the reference time. So:
// forecastTime = 0 means the forecast time is the same as the reference time;
// forecastTime = 3600 means the forecast time is one hour after the reference time.
// When saving data, this value is stored in the DataTime of the PluginDataObject, and
// utilityFlags is set to "FCST_USED" to indicate whether a forecast time is used.
|
||||
protected int forecastTime;
|
||||
|
||||
@DynamicSerializeElement
|
||||
@XmlAttribute
|
||||
private String utilityFlag = null;
|
||||
|
||||
//list of master level values
|
||||
@DynamicSerializeElement
|
||||
@XmlElement(name="GPD-Level-Parameters")
|
||||
|
@ -104,7 +124,7 @@ public class GenericPointDataStationProduct {
|
|||
return slat;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public void setSlat(float slat) {
|
||||
this.slat = slat;
|
||||
}
|
||||
|
@ -177,4 +197,25 @@ public class GenericPointDataStationProduct {
|
|||
}
|
||||
|
||||
|
||||
public int getForecastTime() {
|
||||
return forecastTime;
|
||||
}
|
||||
|
||||
|
||||
public void setForecastTime(int forecastTime) {
|
||||
this.forecastTime = forecastTime;
|
||||
}
|
||||
|
||||
|
||||
public String getUtilityFlag() {
|
||||
return utilityFlag;
|
||||
}
|
||||
|
||||
|
||||
public void setUtilityFlag(String utilityFlag) {
|
||||
this.utilityFlag = utilityFlag;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
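A small sketch of how the two attributes added above are populated, following the way GenericPointDataDao fills them in when it rebuilds a station product; the setter names come from this diff and the values are hypothetical.

import gov.noaa.nws.ncep.common.dataplugin.gpd.product.GenericPointDataStationProduct;

public class StationProductForecastExample {
    // Hypothetical helper (not part of this commit): tag a station product with
    // the new forecast-time and utility-flag attributes.
    public static GenericPointDataStationProduct tag(GenericPointDataStationProduct stnPd,
            int forecastSeconds, String utilityFlag) {
        stnPd.setForecastTime(forecastSeconds); // seconds after the reference time; -1 when unused
        stnPd.setUtilityFlag(utilityFlag);      // e.g. "FCST_USED" when a forecast time applies
        return stnPd;
    }
}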
|
@ -1,4 +1,8 @@
|
|||
/**
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
|
@ -156,7 +160,7 @@ public class GenericPointDataQuery extends PointDataQuery {
|
|||
ids.get(listIndex).add(id);
|
||||
indexes.get(listIndex).add(idx);
|
||||
}
|
||||
long t0 = System.currentTimeMillis();
|
||||
//long t0 = System.currentTimeMillis();
|
||||
for (int i = 0; i < files.size(); i++) {
|
||||
File file = new File(files.get(i));
|
||||
//for(String att: hdf5attribList){
|
||||
|
@ -180,10 +184,10 @@ public class GenericPointDataQuery extends PointDataQuery {
|
|||
masterPDC.setCurrentSz(masterPDC.getAllocatedSz());
|
||||
}
|
||||
}
|
||||
long t1 = System.currentTimeMillis();
|
||||
System.out
|
||||
.println("Total time (ms) spent on pointdata hdf5 retrieval (all files): "
|
||||
+ (t1 - t0));
|
||||
//long t1 = System.currentTimeMillis();
|
||||
//System.out
|
||||
// .println("Total time (ms) spent on pointdata hdf5 retrieval (all files): "
|
||||
// + (t1 - t0));
|
||||
}
|
||||
|
||||
if (!dbParamDesc.isEmpty()) {
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
package gov.noaa.nws.ncep.common.dataplugin.gpd.query;
|
||||
/**
|
||||
* This code has unlimited rights, and is provided "as is" by the National Centers
|
||||
* for Environmental Prediction, without warranty of any kind, either expressed or implied,
|
||||
* including but not limited to the implied warranties of merchantability and/or fitness
|
||||
* for a particular purpose.
|
||||
*
|
||||
*
|
||||
* This code has been developed by the NCEP-SIB for use in the AWIPS2 system.
|
||||
|
@ -33,13 +37,24 @@ import com.raytheon.uf.common.serialization.comm.IServerRequest;
|
|||
@XmlAccessorType(XmlAccessType.NONE)
|
||||
public class GenericPointDataReqMsg implements IServerRequest {
|
||||
public static enum GenericPointDataReqType{
|
||||
// get GPD in Java Object format
|
||||
// get GPD data in Java Object format
|
||||
GET_GPD_PRODUCT_OBJECT,
|
||||
GET_GPD_STATION_PRODUCT_OBJECT,
|
||||
GET_GPD_MOVING_PRODUCT_OBJECT,
|
||||
GET_GPD_STATION_PRODUCT_OBJECT_LIST,
|
||||
GET_GPD_MOVING_PRODUCT_OBJECT_LIST,
|
||||
GET_GPD_STATION_MDL_SND_PRODUCT_OBJECT_LIST,
|
||||
GET_GPD_MOVING_MDL_SND_PRODUCT_OBJECT_LIST,
|
||||
GET_GPD_PRODUCT_INFO_OBJECT,
|
||||
GET_GPD_PRODUCT_TIMELINE_OBJECT,
|
||||
GET_GPD_PRODUCT_RANGESTART_TIME_OBJECT,
|
||||
GET_GPD_STATION_INFO_COLLECTION_OBJECT,
|
||||
GET_GPD_ALL_AVAILABLE_PRODUCTS,
|
||||
GET_GPD_AVAILABLE_MODEL_SOUNDING_PRODUCTS, //GPD pfc sounding
|
||||
GET_GPD_AVAILABLE_OBSERVED_SOUNDING_PRODUCTS,
|
||||
GET_GPD_AVAILABLE_SURFACE_PRODUCTS,
|
||||
|
||||
// The following request types should not be used by CAVE Java applications
|
||||
//get/save product in XML format
|
||||
GET_GPD_PRODUCT_XML,
|
||||
GET_GPD_STATION_PRODUCT_XML,
|
||||
|
@ -52,6 +67,9 @@ public class GenericPointDataReqMsg implements IServerRequest {
|
|||
GET_GPD_MOVING_PRODUCT_GEMPAK_TBL,
|
||||
GET_GPD_PRODUCT_INFO_GEMPAK_TBL,
|
||||
STORE_GPD_PRODUCT_FROM_GEMPAK_TBL,
|
||||
STORE_GPD_MDL_SND_PRODUCT_FROM_GEMPAK_TBL,
|
||||
STORE_GPD_OBS_SND_PRODUCT_FROM_GEMPAK_TBL,
|
||||
STORE_GPD_OBS_SFC_PRODUCT_FROM_GEMPAK_TBL,
|
||||
//Purge GPD DB
|
||||
PURGE_GPD_PRODUCT_ONETIME,
|
||||
PURGE_GPD_PRODUCT_ALLTIME,
|
||||
|
@ -61,9 +79,9 @@ public class GenericPointDataReqMsg implements IServerRequest {
|
|||
public static enum GenericPointDataQueryKey{
|
||||
BY_STN_ID,
|
||||
BY_SLAT_SLON,
|
||||
BY_REPORT_NAME
|
||||
BY_PRODUCT_NAME
|
||||
}
|
||||
|
||||
|
||||
//required for all
|
||||
@DynamicSerializeElement
|
||||
@XmlAttribute(required = true)
|
||||
|
@ -117,9 +135,20 @@ public class GenericPointDataReqMsg implements IServerRequest {
|
|||
@DynamicSerializeElement
|
||||
private String gpdDataString;
|
||||
|
||||
// used for query a list of time line for one station or moving product
|
||||
// used to query sounding data for a list of times for one station or moving product;
// can be a list of reference times (for observed data) or a list of range start times (for pfc data)
|
||||
// used for CAVE Java query only...for now
|
||||
@DynamicSerializeElement
|
||||
private List<Date> refTimeList;
|
||||
private List<Date> queryTimeList;
|
||||
|
||||
//used for query station collection, forecast time, or pfc sounding data
|
||||
// used for CAVE Java query only...for now
|
||||
@DynamicSerializeElement
|
||||
private String refTimeStr;
|
||||
//used for query station collection for model/pfc sounding
|
||||
// used for CAVE Java query only...for now
|
||||
@DynamicSerializeElement
|
||||
private String rangeStartTimeStr;
|
||||
|
||||
public GenericPointDataReqMsg() {
|
||||
super();
|
||||
|
@ -320,12 +349,24 @@ public class GenericPointDataReqMsg implements IServerRequest {
|
|||
this.maxNumLevel = maxNumLevel;
|
||||
}
|
||||
|
||||
public List<Date> getRefTimeList() {
|
||||
return refTimeList;
|
||||
public List<Date> getQueryTimeList() {
|
||||
return queryTimeList;
|
||||
}
|
||||
|
||||
public void setRefTimeList(List<Date> refTimeList) {
|
||||
this.refTimeList = refTimeList;
|
||||
public void setQueryTimeList(List<Date> queryTimeList) {
|
||||
this.queryTimeList = queryTimeList;
|
||||
}
|
||||
public String getRefTimeStr() {
|
||||
return refTimeStr;
|
||||
}
|
||||
public void setRefTimeStr(String refTimeStr) {
|
||||
this.refTimeStr = refTimeStr;
|
||||
}
|
||||
public String getRangeStartTimeStr() {
|
||||
return rangeStartTimeStr;
|
||||
}
|
||||
public void setRangeStartTimeStr(String rangeStartTimeStr) {
|
||||
this.rangeStartTimeStr = rangeStartTimeStr;
|
||||
}
|
||||
|
||||
|
||||
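A hedged sketch of filling in the renamed queryTimeList and the two new time strings, using only the setters visible in this hunk; the remaining request fields (request type, product name, and so on) are configured elsewhere and are omitted here, and the sample time strings are hypothetical.

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import gov.noaa.nws.ncep.common.dataplugin.gpd.query.GenericPointDataReqMsg;

public class GpdReqMsgExample {
    public static GenericPointDataReqMsg withTimes(List<Date> times) {
        GenericPointDataReqMsg msg = new GenericPointDataReqMsg();
        // Reference times (observed data) or range-start times (pfc data).
        msg.setQueryTimeList(new ArrayList<Date>(times));
        // Hypothetical "yyyy-mm-dd HH" reference time for station-collection queries.
        msg.setRefTimeStr("2014-03-01 00");
        // Hypothetical range-start time string for model/pfc sounding queries.
        msg.setRangeStartTimeStr("2014-03-01 03:00:00");
        return msg;
    }
}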
|
|
|
@ -16,6 +16,7 @@
|
|||
* 05/2010 144 L. Lin Migration to TO11DR11.
|
||||
* 11/2013 1066 G. Hull call constructCRSfromWKT
|
||||
* Nov 14, 2013 2393 bclement added getGridGeometry()
|
||||
* 03/2014 TTR957 B. Yin Modified getGridGeometry() to handle native navigation
|
||||
* </pre>
|
||||
*/
|
||||
|
||||
|
@ -72,6 +73,9 @@ public class McidasMapCoverage extends PersistableDataObject implements
|
|||
|
||||
private static final long serialVersionUID = 1;
|
||||
|
||||
// projection id for native satellite navigation
|
||||
public static final int GVAR = 7585;
|
||||
|
||||
@Id
|
||||
private int pid;
|
||||
|
||||
|
@ -371,14 +375,33 @@ public class McidasMapCoverage extends PersistableDataObject implements
|
|||
public GridGeometry2D getGridGeometry()
|
||||
throws MismatchedDimensionException, FactoryException,
|
||||
TransformException {
|
||||
int nx = getNx();
|
||||
int ny = getNy();
|
||||
if (Double.isNaN(this.minX) || Double.isNaN(this.minY)) {
|
||||
findMins();
|
||||
|
||||
GridEnvelope gridRange;
|
||||
Envelope crsRange;
|
||||
if (projection == McidasMapCoverage.GVAR) { // for native projection
|
||||
minX = getUpperLeftElement();
|
||||
int maxX = getUpperLeftElement() + (getNx() * getElementRes());
|
||||
minY = getUpperLeftLine() + (getNy() * getLineRes());
|
||||
minY = -minY;
|
||||
int maxY = -1 * getUpperLeftLine();
|
||||
|
||||
gridRange = new GridEnvelope2D(0, 0, nx, ny);
|
||||
crsRange = new Envelope2D(getCrs(), new Rectangle2D.Double(minX,
|
||||
minY, maxX, maxY));
|
||||
}
|
||||
GridEnvelope gridRange = new GridEnvelope2D(0, 0, nx, ny);
|
||||
Envelope crsRange = new Envelope2D(getCrs(), new Rectangle2D.Double(
|
||||
minX, minY, nx * getDx(), ny * getDy()));
|
||||
|
||||
else {
|
||||
int nx = getNx();
|
||||
int ny = getNy();
|
||||
if (Double.isNaN(this.minX) || Double.isNaN(this.minY)) {
|
||||
findMins();
|
||||
}
|
||||
|
||||
gridRange = new GridEnvelope2D(0, 0, nx, ny);
|
||||
crsRange = new Envelope2D(getCrs(), new Rectangle2D.Double(minX,
|
||||
minY, nx * getDx(), ny * getDy()));
|
||||
}
|
||||
|
||||
return new GridGeometry2D(gridRange, crsRange);
|
||||
}
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
* 10/2009 144 T. Lee Created
|
||||
* 12/2009 144 T. Lee Migrated to TO11D6
|
||||
* 11/2013 1066 G. Hull constructCRSfromWKT (from McidasMapCoverage)
|
||||
* 03/2014 TTR957 B. Yin Moved constructCRSfromWKT to McidasCRSBuilder
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -19,10 +20,8 @@
|
|||
|
||||
package gov.noaa.nws.ncep.common.dataplugin.mcidas;
|
||||
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.mcidas.dao.McidasMapCoverageDao;
|
||||
import gov.noaa.nws.ncep.edex.util.McidasCRSBuilder;
|
||||
|
||||
import org.apache.commons.codec.binary.Base64;
|
||||
import org.apache.commons.logging.Log;
|
||||
|
@ -30,9 +29,6 @@ import org.apache.commons.logging.LogFactory;
|
|||
import org.geotools.geometry.DirectPosition2D;
|
||||
import org.geotools.referencing.operation.DefaultMathTransformFactory;
|
||||
import org.opengis.parameter.ParameterValueGroup;
|
||||
import org.opengis.referencing.FactoryException;
|
||||
import org.opengis.referencing.NoSuchIdentifierException;
|
||||
import org.opengis.referencing.crs.CoordinateReferenceSystem;
|
||||
import org.opengis.referencing.crs.ProjectedCRS;
|
||||
import org.opengis.referencing.operation.MathTransform;
|
||||
|
||||
|
@ -380,8 +376,8 @@ public class McidasSpatialFactory {
|
|||
ProjectedCRS crs = null;
|
||||
|
||||
// Get the correct CRS
|
||||
if (mapProjection == 7585) {
|
||||
crs = constructCRS(type, encodedNav);
|
||||
if (mapProjection == McidasMapCoverage.GVAR) {
|
||||
crs = McidasCRSBuilder.constructCRS(type, encodedNav);
|
||||
}
|
||||
|
||||
// Construct the polygon constructor String
|
||||
|
@ -413,66 +409,8 @@ public class McidasSpatialFactory {
|
|||
return new String(coded);
|
||||
}
|
||||
|
||||
public ProjectedCRS constructCRSfromWKT( String crsWKT) {
|
||||
Pattern p = Pattern.compile("PROJCS\\[\"MCIDAS\\sAREA\\s(.*)\"");
|
||||
Matcher m = p.matcher(crsWKT);
|
||||
m.find();
|
||||
ProjectedCRS crsObject=null;
|
||||
|
||||
if ( m.groupCount() == 1 ) {
|
||||
String type = m.group(1);
|
||||
//System.out.println("FOUND PROJCS:"+m.group(0)+":"+type);
|
||||
p = Pattern.compile("\\[\"NAV_BLOCK_BASE64\",\\s\"(.*)\"\\]");
|
||||
m = p.matcher(crsWKT);
|
||||
boolean found = m.find();
|
||||
|
||||
//System.out.println(m.group());
|
||||
//System.out.println(m.groupCount()+m.group(1));
|
||||
if ( found ) {
|
||||
String navBlock = m.group(1);
|
||||
crsObject = McidasSpatialFactory.getInstance().constructCRS(type, navBlock);
|
||||
}
|
||||
}
|
||||
|
||||
return crsObject;
|
||||
}
|
||||
|
||||
public ProjectedCRS constructCRS(String type, String encoded) {
|
||||
|
||||
ParameterValueGroup pvg = null;
|
||||
|
||||
DefaultMathTransformFactory dmtFactory = new DefaultMathTransformFactory();
|
||||
try {
|
||||
pvg = dmtFactory.getDefaultParameters("MCIDAS_AREA_NAV");
|
||||
} catch (NoSuchIdentifierException e1) {
|
||||
e1.printStackTrace();
|
||||
}
|
||||
|
||||
/*
|
||||
* semi_major and semi_minor parameters are set to 1, so that no global
|
||||
* scaling is performed during coordinate transforms by
|
||||
* org.geotools.referencing.operation.projection.MapProjection based on
|
||||
* the radius of earth
|
||||
*/
|
||||
pvg.parameter("semi_major").setValue(1.0);
|
||||
pvg.parameter("semi_minor").setValue(1.0);
|
||||
pvg.parameter("central_meridian").setValue(0.0);
|
||||
// pvg.parameter("scale_factor").setValue(1.0);
|
||||
|
||||
pvg.parameter("NAV_BLOCK_BASE64").setValue(encoded);
|
||||
// System.out.println(pvg.toString() );
|
||||
|
||||
String projectionName = "MCIDAS AREA " + type;
|
||||
ProjectedCRS mcidasCRS = null;
|
||||
try {
|
||||
mcidasCRS = MapUtil.constructProjection(projectionName, pvg);
|
||||
} catch (NoSuchIdentifierException e) {
|
||||
e.printStackTrace();
|
||||
} catch (FactoryException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
return mcidasCRS;
|
||||
public ProjectedCRS constructCRSfromWKT(String crsWKT) {
|
||||
return McidasCRSBuilder.constructCRSfromWKT(crsWKT);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
* remove xml serialization as well *
|
||||
* 10/2011 S. Gurung Added changes related to getting stid/lat/lon/elev
|
||||
* from database instead of snstns.xml file
|
||||
*
|
||||
* 6/2014 T.Lee Added support XXAA, XXBB, XXCC, XXDD
|
||||
* </pre>
|
||||
*
|
||||
* This code has been developed by the SIB for use in the AWIPS2 system.
|
||||
|
@ -26,6 +26,12 @@
|
|||
|
||||
package gov.noaa.nws.ncep.common.dataplugin.ncuair.dao;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairLiftedIndex;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairMaxWind;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairObsLevels;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairRecord;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairTropopause;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
|
@ -37,109 +43,213 @@ import com.raytheon.uf.common.pointdata.PointDataContainer;
|
|||
import com.raytheon.uf.common.pointdata.PointDataDescription;
|
||||
import com.raytheon.uf.common.pointdata.PointDataView;
|
||||
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairLiftedIndex;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairMaxWind;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairObsLevels;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairTropopause;
|
||||
import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairRecord;
|
||||
|
||||
public class NcUairToPointData {
|
||||
private static final String RAW_DATA = "RAWDATA";
|
||||
private static final String RAW_DATA = "RAWDATA";
|
||||
|
||||
private static final String UTC = "UTC";
|
||||
|
||||
private static final String REPORT_TYPE = "REPORTTYPE";
|
||||
|
||||
private static final String STATION_ID = "STATIONID";
|
||||
|
||||
private static final String ISSUE_TIME = "ISSUETIME";
|
||||
private static final String OBS_TIME = "OBSTIME";
|
||||
|
||||
private static final String OBS_TIME = "OBSTIME";
|
||||
|
||||
private static final String SYNOPTIC_TIME = "SYNOPTIME";
|
||||
|
||||
private static final String DATAURI = "DATAURI";
|
||||
private static final String DATA_TYPE = "DATATYPE";
|
||||
|
||||
private static final String DATA_TYPE = "DATATYPE";
|
||||
|
||||
private static final String CORRECTION_INDICATOR = "CORR";
|
||||
|
||||
private static final String WMO_HEADER = "WMOHEADER";
|
||||
|
||||
private static final String STATION_NUMBER = "STNUM";
|
||||
|
||||
private static final String LATITUDE = "LATITUDE";
|
    private static final String LONGITUDE = "LONGITUDE";
    private static final String ELEVATION = "ELEVATION";
    private static final String NIL = "NIL";
    private static final String TTAA_PRES = "TTAA_PRES";
    private static final String TTAA_TEMP = "TTAA_TEMP";
    private static final String TTAA_DWPT = "TTAA_DWPT";
    private static final String TTAA_DRCT = "TTAA_DRCT";
    private static final String TTAA_SPED = "TTAA_SPED";
    private static final String TTAA_HGHT = "TTAA_HGHT";
    private static final String TTBB_PRES = "TTBB_PRES";
    private static final String TTBB_TEMP = "TTBB_TEMP";
    private static final String TTBB_DWPT = "TTBB_DWPT";
    private static final String TTCC_PRES = "TTCC_PRES";
    private static final String TTCC_TEMP = "TTCC_TEMP";
    private static final String TTCC_DWPT = "TTCC_DWPT";
    private static final String TTCC_DRCT = "TTCC_DRCT";
    private static final String TTCC_SPED = "TTCC_SPED";
    private static final String TTCC_HGHT = "TTCC_HGHT";
    private static final String TTDD_PRES = "TTDD_PRES";
    private static final String TTDD_TEMP = "TTDD_TEMP";
    private static final String TTDD_DWPT = "TTDD_DWPT";
    private static final String PPAA_PRES = "PPAA_PRES";
    private static final String PPAA_TEMP = "PPAA_TEMP";
    private static final String PPAA_DWPT = "PPAA_DWPT";
    private static final String PPAA_DRCT = "PPAA_DRCT";
    private static final String PPAA_SPED = "PPAA_SPED";
    private static final String PPAA_HGHT = "PPAA_HGHT";
    private static final String PPBB_DRCT = "PPBB_DRCT";
    private static final String PPBB_SPED = "PPBB_SPED";
    private static final String PPBB_HGHT = "PPBB_HGHT";
    private static final String PPCC_PRES = "PPCC_PRES";
    private static final String PPCC_TEMP = "PPCC_TEMP";
    private static final String PPCC_DWPT = "PPCC_DWPT";
    private static final String PPCC_DRCT = "PPCC_DRCT";
    private static final String PPCC_SPED = "PPCC_SPED";
    private static final String PPCC_HGHT = "PPCC_HGHT";
    private static final String PPDD_DRCT = "PPDD_DRCT";
    private static final String PPDD_SPED = "PPDD_SPED";
    private static final String PPDD_HGHT = "PPDD_HGHT";
    private static final String TROP_PRES = "TROP_PRES";
    private static final String TROP_TEMP = "TROP_TEMP";
    private static final String TROP_DWPT = "TROP_DWPT";
    private static final String TROP_DRCT = "TROP_DRCT";
    private static final String TROP_SPED = "TROP_SPED";
    private static final String WMAX_PRES = "WMAX_PRES";
    private static final String WMAX_DRCT = "WMAX_DRCT";
    private static final String WMAX_SPED = "WMAX_SPED";
    private static final String WMAX_LO_SHEAR = "WMAX_LO_SHEAR";
    private static final String WMAX_HI_SHEAR = "WMAX_HI_SHEAR";
    private static final String TTAA_LIFT = "TTAA_LIFT";
    private static final String TTAA_LO_MEAN_DRCT = "TTAA_LO_MEAN_DRCT";
    private static final String TTAA_LO_MEAN_SPED = "TTAA_LO_MEAN_SPED";
    private static final String TTAA_HI_MEAN_DRCT = "TTAA_HI_MEAN_DRCT";
    private static final String TTAA_HI_MEAN_SPED = "TTAA_HI_MEAN_SPED";
    private static final String TTBB_LIFT = "TTBB_LIFT";
    private static final String TTBB_LO_MEAN_DRCT = "TTBB_LO_MEAN_DRCT";
    private static final String TTBB_LO_MEAN_SPED = "TTBB_LO_MEAN_SPED";
    private static final String TTBB_HI_MEAN_DRCT = "TTBB_HI_MEAN_DRCT";
    private static final String TTBB_HI_MEAN_SPED = "TTBB_HI_MEAN_SPED";

-   private enum REPORTTYPE {TTAA, TTBB, TTCC, TTDD, PPAA, PPBB, PPCC, PPDD,
-       UUAA, UUBB, UUCC, UUDD, XXAA, XXBB, XXCC, XXDD};

    private static final String XXAA_PRES = "XXAA_PRES";
    private static final String XXAA_TEMP = "XXAA_TEMP";
    private static final String XXAA_DWPT = "XXAA_DWPT";
    private static final String XXAA_DRCT = "XXAA_DRCT";
    private static final String XXAA_SPED = "XXAA_SPED";
    private static final String XXAA_HGHT = "XXAA_HGHT";
    private static final String XXBB_PRES = "XXBB_PRES";
    private static final String XXBB_TEMP = "XXBB_TEMP";
    private static final String XXBB_DWPT = "XXBB_DWPT";
    private static final String XXCC_PRES = "XXCC_PRES";
    private static final String XXCC_TEMP = "XXCC_TEMP";
    private static final String XXCC_DWPT = "XXCC_DWPT";
    private static final String XXCC_DRCT = "XXCC_DRCT";
    private static final String XXCC_SPED = "XXCC_SPED";
    private static final String XXCC_HGHT = "XXCC_HGHT";
    private static final String XXDD_PRES = "XXDD_PRES";
    private static final String XXDD_TEMP = "XXDD_TEMP";
    private static final String XXDD_DWPT = "XXDD_DWPT";

+   private enum REPORTTYPE {
+       TTAA, TTBB, TTCC, TTDD, PPAA, PPBB, PPCC, PPDD, UUAA, UUBB, UUCC, UUDD, XXAA, XXBB, XXCC, XXDD
+   };

    /**
     * !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! It is important to
-    * keep this up to date or risk breaking backwards compatibility
-    * uu
+    * keep this up to date or risk breaking backwards compatibility uu
     */
-   private static final String[] ALL_PARAMS = { RAW_DATA, UTC, REPORT_TYPE, STATION_ID,
-       ISSUE_TIME, OBS_TIME, SYNOPTIC_TIME, DATAURI, DATA_TYPE, CORRECTION_INDICATOR,
-       WMO_HEADER, STATION_NUMBER, LATITUDE, LONGITUDE, ELEVATION, NIL,
-       TTAA_PRES, TTAA_TEMP, TTAA_DWPT, TTAA_DRCT, TTAA_SPED, TTAA_HGHT,
-       TTBB_PRES, TTBB_TEMP, TTBB_DWPT,
-       TTCC_PRES, TTCC_TEMP, TTCC_DWPT, TTCC_DRCT, TTCC_SPED, TTCC_HGHT,
-       TTDD_PRES, TTDD_TEMP, TTDD_DWPT,
-       PPAA_PRES, PPAA_TEMP, PPAA_DWPT, PPAA_DRCT, PPAA_SPED, PPAA_HGHT,
-       PPBB_DRCT, PPBB_SPED, PPBB_HGHT,
-       PPCC_PRES, PPCC_TEMP, PPCC_DWPT, PPCC_DRCT, PPCC_SPED, PPCC_HGHT,
-       PPDD_DRCT, PPDD_SPED, PPDD_HGHT,
-       TROP_PRES, TROP_TEMP, TROP_DWPT, TROP_DRCT, TROP_SPED, WMAX_PRES,
-       WMAX_DRCT, WMAX_SPED, WMAX_LO_SHEAR, WMAX_HI_SHEAR,
-       TTAA_LIFT, TTAA_LO_MEAN_DRCT, TTAA_LO_MEAN_SPED, TTAA_HI_MEAN_DRCT, TTAA_HI_MEAN_SPED,
-       TTBB_LIFT, TTBB_LO_MEAN_DRCT, TTBB_LO_MEAN_SPED, TTBB_HI_MEAN_DRCT, TTAA_HI_MEAN_SPED,};
+   private static final String[] ALL_PARAMS = { RAW_DATA, UTC, REPORT_TYPE,
+       STATION_ID, ISSUE_TIME, OBS_TIME, SYNOPTIC_TIME, DATAURI,
+       DATA_TYPE, CORRECTION_INDICATOR, WMO_HEADER, STATION_NUMBER,
+       LATITUDE, LONGITUDE, ELEVATION, NIL, TTAA_PRES, TTAA_TEMP,
+       TTAA_DWPT, TTAA_DRCT, TTAA_SPED, TTAA_HGHT, TTBB_PRES, TTBB_TEMP,
+       TTBB_DWPT, TTCC_PRES, TTCC_TEMP, TTCC_DWPT, TTCC_DRCT, TTCC_SPED,
+       TTCC_HGHT, TTDD_PRES, TTDD_TEMP, TTDD_DWPT, PPAA_PRES, PPAA_TEMP,
+       PPAA_DWPT, PPAA_DRCT, PPAA_SPED, PPAA_HGHT, PPBB_DRCT, PPBB_SPED,
+       PPBB_HGHT, PPCC_PRES, PPCC_TEMP, PPCC_DWPT, PPCC_DRCT, PPCC_SPED,
+       PPCC_HGHT, PPDD_DRCT, PPDD_SPED, PPDD_HGHT, TROP_PRES, TROP_TEMP,
+       TROP_DWPT, TROP_DRCT, TROP_SPED, WMAX_PRES, WMAX_DRCT, WMAX_SPED,
+       WMAX_LO_SHEAR, WMAX_HI_SHEAR, TTAA_LIFT, TTAA_LO_MEAN_DRCT,
+       TTAA_LO_MEAN_SPED, TTAA_HI_MEAN_DRCT, TTAA_HI_MEAN_SPED, TTBB_LIFT,
+       TTBB_LO_MEAN_DRCT, TTBB_LO_MEAN_SPED, TTBB_HI_MEAN_DRCT,
+       TTAA_HI_MEAN_SPED, XXAA_PRES, XXAA_TEMP, XXAA_DWPT, XXAA_DRCT,
+       XXAA_SPED, XXAA_HGHT, XXBB_PRES, XXBB_TEMP, XXBB_DWPT, XXCC_PRES,
+       XXCC_TEMP, XXCC_DWPT, XXCC_DRCT, XXCC_SPED, XXCC_HGHT, XXDD_PRES,
+       XXDD_TEMP, XXDD_DWPT };

    public static final String ALL_PARAMS_LIST;

    static {
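The hunk ends at the opening of the static initializer, so the body that populates ALL_PARAMS_LIST is not visible in this diff. As an illustration only (the actual initializer is not shown here), a static block along these lines could build the comma-separated list that the ALL_PARAMS_LIST name suggests; the class and sample parameter names below are placeholders:

// Hypothetical sketch -- not the real NcUairToPointData initializer.
public class PointDataParamsSketch {

    private static final String[] ALL_PARAMS = { "STID", "TTAA_PRES", "TTAA_TEMP" };

    public static final String ALL_PARAMS_LIST;

    static {
        // Join the parameter names into one comma-separated string once, at class load.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < ALL_PARAMS.length; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append(ALL_PARAMS[i]);
        }
        ALL_PARAMS_LIST = sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(ALL_PARAMS_LIST); // STID, TTAA_PRES, TTAA_TEMP
    }
}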
@ -171,8 +281,9 @@ public class NcUairToPointData {
|
|||
}
|
||||
|
||||
public PluginDataObject[] toPointData(PluginDataObject[] pdo) {
|
||||
//long curTime = System.currentTimeMillis();
|
||||
//System.out.println("H5 uair toPointData entered, pdo size = "+ pdo.length);
|
||||
// long curTime = System.currentTimeMillis();
|
||||
// System.out.println("H5 uair toPointData entered, pdo size = "+
|
||||
// pdo.length);
|
||||
if (pdo.length > 0) {
|
||||
Map<File, PointDataContainer> pointMap = new HashMap<File, PointDataContainer>();
|
||||
|
||||
|
@ -181,7 +292,7 @@ public class NcUairToPointData {
|
|||
continue;
|
||||
File f = this.dao.getFullFilePath(p);
|
||||
PointDataContainer pdc = pointMap.get(f);
|
||||
|
||||
|
||||
if (pdc == null) {
|
||||
pdc = PointDataContainer.build(this.pdd);
|
||||
pointMap.put(f, pdc);
|
||||
|
@ -189,310 +300,389 @@ public class NcUairToPointData {
|
|||
NcUairRecord mr = (NcUairRecord) p;
|
||||
PointDataView pdv = buildView(pdc, mr);
|
||||
mr.setPointDataView(pdv);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
//long enqueueTime = System.currentTimeMillis();
|
||||
//double latency = (enqueueTime-curTime) ;
|
||||
//System.out.println("H5 uair toPointData spend "+ latency);
|
||||
// long enqueueTime = System.currentTimeMillis();
|
||||
// double latency = (enqueueTime-curTime) ;
|
||||
// System.out.println("H5 uair toPointData spend "+ latency);
|
||||
return pdo;
|
||||
}
|
||||
|
||||
|
||||
private PointDataView buildView(PointDataContainer container,
|
||||
NcUairRecord record) {
|
||||
|
||||
PointDataView pdv = container.append();
|
||||
pdv.setString(STATION_ID, record.getStationId());
|
||||
|
||||
|
||||
int maxManLevel = -1;
|
||||
int maxSigTempLevel = -1;
|
||||
int maxSigWindLevel = -1;
|
||||
int maxWmaxLevel = -1;
|
||||
int maxTropLevel = -1;
|
||||
int maxMiscLevel = -1;
|
||||
|
||||
Dimension [] dims = pdd.dimensions;
|
||||
for(Dimension d : dims) {
|
||||
|
||||
if("maxManLevel".equals(d.getDimensionName())) {
|
||||
Dimension[] dims = pdd.dimensions;
|
||||
for (Dimension d : dims) {
|
||||
|
||||
if ("maxManLevel".equals(d.getDimensionName())) {
|
||||
maxManLevel = d.getDimensionLength();
|
||||
}
|
||||
|
||||
if("maxSigTempLevel".equals(d.getDimensionName())) {
|
||||
|
||||
if ("maxSigTempLevel".equals(d.getDimensionName())) {
|
||||
maxSigTempLevel = d.getDimensionLength();
|
||||
}
|
||||
|
||||
if("maxSigWindLevel".equals(d.getDimensionName())) {
|
||||
|
||||
if ("maxSigWindLevel".equals(d.getDimensionName())) {
|
||||
maxSigWindLevel = d.getDimensionLength();
|
||||
}
|
||||
|
||||
if("maxWmaxLevel".equals(d.getDimensionName())) {
|
||||
|
||||
if ("maxWmaxLevel".equals(d.getDimensionName())) {
|
||||
maxWmaxLevel = d.getDimensionLength();
|
||||
}
|
||||
|
||||
if("maxTropLevel".equals(d.getDimensionName())) {
|
||||
|
||||
if ("maxTropLevel".equals(d.getDimensionName())) {
|
||||
maxTropLevel = d.getDimensionLength();
|
||||
}
|
||||
|
||||
if("maxMiscLevel".equals(d.getDimensionName())) {
|
||||
|
||||
if ("maxMiscLevel".equals(d.getDimensionName())) {
|
||||
maxMiscLevel = d.getDimensionLength();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (record.getCorr() != null) {
|
||||
pdv.setString(CORRECTION_INDICATOR,record.getCorr());
|
||||
pdv.setString(CORRECTION_INDICATOR, record.getCorr());
|
||||
} else {
|
||||
pdv.setString(CORRECTION_INDICATOR, "");
|
||||
}
|
||||
|
||||
if (record.getLocation()!=null) {
|
||||
pdv.setFloat(LATITUDE, (float) record.getLatitude());
|
||||
pdv.setFloat(LONGITUDE, (float) record.getLongitude());
|
||||
if(record.getElevation()!=null)
|
||||
pdv.setFloat(ELEVATION, (float) record.getElevation());
|
||||
if (record.getLocation() != null) {
|
||||
pdv.setFloat(LATITUDE, (float) record.getLatitude());
|
||||
pdv.setFloat(LONGITUDE, (float) record.getLongitude());
|
||||
if (record.getElevation() != null)
|
||||
pdv.setFloat(ELEVATION, (float) record.getElevation());
|
||||
}
|
||||
if(record.getObsTime()!=null)
|
||||
pdv.setLong(OBS_TIME, record.getObsTime().getTime().getTime());
|
||||
if(record.getSynopticTime()!=null)
|
||||
pdv.setLong(SYNOPTIC_TIME, record.getSynopticTime().getTime().getTime());
|
||||
if(record.getIssueTime()!=null)
|
||||
pdv.setLong(ISSUE_TIME, record.getIssueTime().getTime().getTime());
|
||||
pdv.setString(DATAURI, record.getDataURI());
|
||||
pdv.setString(DATA_TYPE, record.getDataType());
|
||||
if (record.getObsTime() != null)
|
||||
pdv.setLong(OBS_TIME, record.getObsTime().getTime().getTime());
|
||||
if (record.getSynopticTime() != null)
|
||||
pdv.setLong(SYNOPTIC_TIME, record.getSynopticTime().getTime()
|
||||
.getTime());
|
||||
if (record.getIssueTime() != null)
|
||||
pdv.setLong(ISSUE_TIME, record.getIssueTime().getTime().getTime());
|
||||
pdv.setString(DATAURI, record.getDataURI());
|
||||
pdv.setString(DATA_TYPE, record.getDataType());
|
||||
pdv.setString(REPORT_TYPE, record.getReportType());
|
||||
pdv.setString(WMO_HEADER, record.getWmoHeader());
|
||||
pdv.setString(STATION_NUMBER, record.getStnum());
|
||||
if(record.getNil()!=null)
|
||||
pdv.setString(NIL, record.getNil().toString());
|
||||
if (record.getNil() != null)
|
||||
pdv.setString(NIL, record.getNil().toString());
|
||||
pdv.setInt(UTC, record.getUTC());
|
||||
pdv.setString(RAW_DATA, record.getBullMessage());
|
||||
int index;
|
||||
|
||||
if ( record.getObsLevels() != null ) {
|
||||
|
||||
if (record.getObsLevels() != null) {
|
||||
Iterator<NcUairObsLevels> sls = record.getObsLevels().iterator();
|
||||
switch ( REPORTTYPE.valueOf(record.getDataType()) ) {
|
||||
|
||||
switch (REPORTTYPE.valueOf(record.getDataType())) {
|
||||
|
||||
case TTAA:
|
||||
case UUAA:
|
||||
case XXAA:
|
||||
|
||||
index = 0;
|
||||
if (sls !=null ) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getPres() > 0 && index < maxManLevel ) {
|
||||
pdv.setFloat(TTAA_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTAA_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(TTAA_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTAA_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(TTAA_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(TTAA_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTAA", index);
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxManLevel) {
|
||||
pdv.setFloat(TTAA_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTAA_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(TTAA_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTAA_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(TTAA_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(TTAA_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTAA", index);
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case TTBB:
|
||||
case XXAA:
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxManLevel) {
|
||||
pdv.setFloat(XXAA_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(XXAA_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(XXAA_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(XXAA_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(XXAA_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(XXAA_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numXXAA", index);
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case TTBB:
|
||||
case UUBB:
|
||||
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxSigTempLevel) {
|
||||
pdv.setFloat(TTBB_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTBB_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTBB_DWPT, sl.getDwpt(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTBB", index);
|
||||
}
|
||||
break;
|
||||
case XXBB:
|
||||
index = 0;
|
||||
if ( sls != null ) {
|
||||
while (sls.hasNext() ) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getPres() > 0 && index < maxSigTempLevel ) {
|
||||
pdv.setFloat(TTBB_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTBB_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTBB_DWPT, sl.getDwpt(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTBB", index);
|
||||
}
|
||||
break;
|
||||
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxSigTempLevel) {
|
||||
pdv.setFloat(XXBB_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(XXBB_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(XXBB_DWPT, sl.getDwpt(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numXXBB", index);
|
||||
}
|
||||
break;
|
||||
|
||||
case TTCC:
|
||||
case UUCC:
|
||||
case XXCC:
|
||||
index = 0;
|
||||
if ( sls != null ) {
|
||||
while (sls.hasNext() ) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getPres() > 0 && index < maxManLevel ) {
|
||||
|
||||
pdv.setFloat(TTCC_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTCC_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(TTCC_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTCC_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(TTCC_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(TTCC_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTCC",index);
|
||||
}
|
||||
break;
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxManLevel) {
|
||||
|
||||
pdv.setFloat(TTCC_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTCC_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(TTCC_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTCC_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(TTCC_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(TTCC_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTCC", index);
|
||||
}
|
||||
break;
|
||||
case XXCC:
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxManLevel) {
|
||||
|
||||
pdv.setFloat(XXCC_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(XXCC_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(XXCC_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(XXCC_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(XXCC_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(XXCC_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numXXCC", index);
|
||||
}
|
||||
break;
|
||||
|
||||
case TTDD:
|
||||
case UUDD:
|
||||
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxSigTempLevel) {
|
||||
pdv.setFloat(TTDD_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTDD_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTDD_DWPT, sl.getDwpt(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTDD", index);
|
||||
}
|
||||
break;
|
||||
case XXDD:
|
||||
index = 0;
|
||||
if ( sls != null ) {
|
||||
while (sls.hasNext() ) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getPres() > 0 && index < maxSigTempLevel ) {
|
||||
pdv.setFloat(TTDD_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(TTDD_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(TTDD_DWPT, sl.getDwpt(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTTDD", index);
|
||||
}
|
||||
break;
|
||||
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxSigTempLevel) {
|
||||
pdv.setFloat(XXDD_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(XXDD_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(XXDD_DWPT, sl.getDwpt(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numXXDD", index);
|
||||
}
|
||||
break;
|
||||
|
||||
case PPAA:
|
||||
index = 0;
|
||||
if ( sls != null ) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getPres() > 0 && index < maxManLevel ) {
|
||||
pdv.setFloat(PPAA_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(PPAA_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPAA_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(PPAA_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(PPAA_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPAA_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numPPAA", index);
|
||||
}
|
||||
break;
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getPres() > 0 && index < maxManLevel) {
|
||||
pdv.setFloat(PPAA_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(PPAA_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPAA_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(PPAA_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(PPAA_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPAA_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numPPAA", index);
|
||||
}
|
||||
break;
|
||||
case PPBB:
|
||||
index = 0;
|
||||
if ( sls != null ) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getHght() > 0 && index < maxSigWindLevel ) {
|
||||
pdv.setFloat(PPBB_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPBB_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPBB_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt ("numPPBB", index);
|
||||
}
|
||||
break;
|
||||
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getHght() > 0 && index < maxSigWindLevel) {
|
||||
pdv.setFloat(PPBB_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPBB_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPBB_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numPPBB", index);
|
||||
}
|
||||
break;
|
||||
|
||||
case PPCC:
|
||||
index = 0;
|
||||
if (sls !=null ) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( (sl.getPres() > 0 || sl.getHght() > 0 ) && index < maxManLevel ) {
|
||||
pdv.setFloat(PPCC_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(PPCC_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPCC_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(PPCC_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(PPCC_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPCC_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ((sl.getPres() > 0 || sl.getHght() > 0)
|
||||
&& index < maxManLevel) {
|
||||
pdv.setFloat(PPCC_PRES, sl.getPres(), index);
|
||||
pdv.setFloat(PPCC_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPCC_TEMP, sl.getTemp(), index);
|
||||
pdv.setFloat(PPCC_DWPT, sl.getDwpt(), index);
|
||||
pdv.setFloat(PPCC_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPCC_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
|
||||
}
|
||||
}
|
||||
pdv.setInt("numPPCC", index);
|
||||
}
|
||||
break;
|
||||
|
||||
}
|
||||
}
|
||||
pdv.setInt ("numPPCC", index);
|
||||
}
|
||||
break;
|
||||
|
||||
case PPDD:
|
||||
index = 0;
|
||||
if (sls !=null ) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if ( sl.getHght() > 0 && index < maxSigWindLevel ) {
|
||||
pdv.setFloat(PPDD_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPDD_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPDD_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
index = 0;
|
||||
if (sls != null) {
|
||||
while (sls.hasNext()) {
|
||||
NcUairObsLevels sl = sls.next();
|
||||
if (sl.getHght() > 0 && index < maxSigWindLevel) {
|
||||
pdv.setFloat(PPDD_HGHT, sl.getHght(), index);
|
||||
pdv.setFloat(PPDD_DRCT, sl.getDrct(), index);
|
||||
pdv.setFloat(PPDD_SPED, sl.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
|
||||
}
|
||||
pdv.setInt ("numPPDD", index);
|
||||
}
|
||||
}
|
||||
pdv.setInt("numPPDD", index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if ( record.getTropopause () != null ) {
|
||||
Iterator<NcUairTropopause> trops = record.getTropopause().iterator();
|
||||
index = 0;
|
||||
if (trops != null) {
|
||||
while (trops.hasNext()) {
|
||||
NcUairTropopause trop = trops.next();
|
||||
if ( trop.getPres() > 0 && index < maxTropLevel ) {
|
||||
pdv.setFloat(TROP_PRES, trop.getPres(), index);
|
||||
pdv.setFloat(TROP_TEMP, trop.getTemp(), index);
|
||||
pdv.setFloat(TROP_DWPT, trop.getDwpt(), index);
|
||||
pdv.setFloat(TROP_DRCT, trop.getDrct(), index);
|
||||
pdv.setFloat(TROP_SPED, trop.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt ("numTrop", index);
|
||||
}
|
||||
}
|
||||
if (record.getTropopause() != null) {
|
||||
Iterator<NcUairTropopause> trops = record.getTropopause()
|
||||
.iterator();
|
||||
index = 0;
|
||||
if (trops != null) {
|
||||
while (trops.hasNext()) {
|
||||
NcUairTropopause trop = trops.next();
|
||||
if (trop.getPres() > 0 && index < maxTropLevel) {
|
||||
pdv.setFloat(TROP_PRES, trop.getPres(), index);
|
||||
pdv.setFloat(TROP_TEMP, trop.getTemp(), index);
|
||||
pdv.setFloat(TROP_DWPT, trop.getDwpt(), index);
|
||||
pdv.setFloat(TROP_DRCT, trop.getDrct(), index);
|
||||
pdv.setFloat(TROP_SPED, trop.getSped(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numTrop", index);
|
||||
}
|
||||
}
|
||||
|
||||
if (record.getMaxWind() != null ) {
|
||||
Iterator<NcUairMaxWind> wmaxs = record.getMaxWind().iterator();
|
||||
index = 0;
|
||||
if (wmaxs != null) {
|
||||
while (wmaxs.hasNext()) {
|
||||
NcUairMaxWind wmax = wmaxs.next();
|
||||
if ( wmax.getPres() > 0 && index < maxWmaxLevel ) {
|
||||
pdv.setFloat(WMAX_PRES, wmax.getPres(), index);
|
||||
pdv.setFloat(WMAX_DRCT, wmax.getDrct(), index);
|
||||
pdv.setFloat(WMAX_SPED, wmax.getSped(), index);
|
||||
pdv.setFloat(WMAX_LO_SHEAR, wmax.getLoShear(), index);
|
||||
pdv.setFloat(WMAX_HI_SHEAR, wmax.getHiShear(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt ("numWmax", index);
|
||||
}
|
||||
}
|
||||
if (record.getMaxWind() != null) {
|
||||
Iterator<NcUairMaxWind> wmaxs = record.getMaxWind().iterator();
|
||||
index = 0;
|
||||
if (wmaxs != null) {
|
||||
while (wmaxs.hasNext()) {
|
||||
NcUairMaxWind wmax = wmaxs.next();
|
||||
if (wmax.getPres() > 0 && index < maxWmaxLevel) {
|
||||
pdv.setFloat(WMAX_PRES, wmax.getPres(), index);
|
||||
pdv.setFloat(WMAX_DRCT, wmax.getDrct(), index);
|
||||
pdv.setFloat(WMAX_SPED, wmax.getSped(), index);
|
||||
pdv.setFloat(WMAX_LO_SHEAR, wmax.getLoShear(), index);
|
||||
pdv.setFloat(WMAX_HI_SHEAR, wmax.getHiShear(), index);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
pdv.setInt("numWmax", index);
|
||||
}
|
||||
}
|
||||
|
||||
if (record.getLiftedIndex() != null) {
|
||||
Iterator<NcUairLiftedIndex> miscs = record.getLiftedIndex().iterator();
|
||||
index = 0;
|
||||
if ( miscs != null) {
|
||||
while (miscs.hasNext()) {
|
||||
NcUairLiftedIndex misc = miscs.next();
|
||||
if ( index < maxMiscLevel ) {
|
||||
if ( record.getDataType().equals("TTAA") ) {
|
||||
pdv.setFloat(TTAA_LIFT, misc.getLiTemp(), index);
|
||||
pdv.setFloat(TTAA_LO_MEAN_DRCT, misc.getLoDrct(), index);
|
||||
pdv.setFloat(TTAA_LO_MEAN_SPED, misc.getLoSped(), index);
|
||||
pdv.setFloat(TTAA_HI_MEAN_DRCT, misc.getHiDrct(), index);
|
||||
pdv.setFloat(TTAA_HI_MEAN_SPED, misc.getHiSped(), index);
|
||||
} else {
|
||||
pdv.setFloat(TTBB_LIFT, misc.getLiTemp(), index);
|
||||
pdv.setFloat(TTBB_LO_MEAN_DRCT, misc.getLoDrct(), index);
|
||||
pdv.setFloat(TTBB_LO_MEAN_SPED, misc.getLoSped(), index);
|
||||
pdv.setFloat(TTBB_HI_MEAN_DRCT, misc.getHiDrct(), index);
|
||||
pdv.setFloat(TTBB_HI_MEAN_SPED, misc.getHiSped(), index);
|
||||
}
|
||||
index++;
|
||||
}
|
||||
}
|
||||
if ( record.getDataType().equals ("TTAA")) {
|
||||
pdv.setInt ("numMiscTTAA", index);
|
||||
} else {
|
||||
pdv.setInt ("numMiscTTBB", index);
|
||||
}
|
||||
}
|
||||
}
|
||||
return pdv;
|
||||
if (record.getLiftedIndex() != null) {
|
||||
Iterator<NcUairLiftedIndex> miscs = record.getLiftedIndex()
|
||||
.iterator();
|
||||
index = 0;
|
||||
if (miscs != null) {
|
||||
while (miscs.hasNext()) {
|
||||
NcUairLiftedIndex misc = miscs.next();
|
||||
if (index < maxMiscLevel) {
|
||||
if (record.getDataType().equals("TTAA")) {
|
||||
pdv.setFloat(TTAA_LIFT, misc.getLiTemp(), index);
|
||||
pdv.setFloat(TTAA_LO_MEAN_DRCT, misc.getLoDrct(),
|
||||
index);
|
||||
pdv.setFloat(TTAA_LO_MEAN_SPED, misc.getLoSped(),
|
||||
index);
|
||||
pdv.setFloat(TTAA_HI_MEAN_DRCT, misc.getHiDrct(),
|
||||
index);
|
||||
pdv.setFloat(TTAA_HI_MEAN_SPED, misc.getHiSped(),
|
||||
index);
|
||||
} else {
|
||||
pdv.setFloat(TTBB_LIFT, misc.getLiTemp(), index);
|
||||
pdv.setFloat(TTBB_LO_MEAN_DRCT, misc.getLoDrct(),
|
||||
index);
|
||||
pdv.setFloat(TTBB_LO_MEAN_SPED, misc.getLoSped(),
|
||||
index);
|
||||
pdv.setFloat(TTBB_HI_MEAN_DRCT, misc.getHiDrct(),
|
||||
index);
|
||||
pdv.setFloat(TTBB_HI_MEAN_SPED, misc.getHiSped(),
|
||||
index);
|
||||
}
|
||||
index++;
|
||||
}
|
||||
}
|
||||
if (record.getDataType().equals("TTAA")) {
|
||||
pdv.setInt("numMiscTTAA", index);
|
||||
} else {
|
||||
pdv.setInt("numMiscTTBB", index);
|
||||
}
|
||||
}
|
||||
}
|
||||
return pdv;
|
||||
}
|
||||
}
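The toPointData() method above groups incoming records into one PointDataContainer per target HDF5 file (keyed by dao.getFullFilePath) before building a view for each record. The standalone sketch below only illustrates that group-by-file pattern with plain JDK collections; the Record class and its fields are stand-ins, not the real PointData API:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Minimal illustration of the "one container per file" grouping used in toPointData().
public class GroupByFileSketch {

    static class Record {
        final String file;    // stand-in for the HDF5 path returned by the DAO
        final String station;

        Record(String file, String station) {
            this.file = file;
            this.station = station;
        }
    }

    public static Map<String, List<Record>> group(List<Record> records) {
        Map<String, List<Record>> byFile = new HashMap<>();
        for (Record r : records) {
            // Create the per-file bucket lazily, exactly once per file.
            byFile.computeIfAbsent(r.file, k -> new ArrayList<>()).add(r);
        }
        return byFile;
    }

    public static void main(String[] args) {
        List<Record> in = List.of(new Record("a.h5", "KOUN"),
                new Record("a.h5", "KFWD"), new Record("b.h5", "KOAX"));
        System.out.println(group(in).keySet()); // two buckets: a.h5 and b.h5
    }
}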
File diff suppressed because it is too large.
|
@ -17,6 +17,7 @@ import com.raytheon.uf.common.serialization.comm.IServerRequest;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 22, 2013 sgilbert Initial creation
+ * Jan 25, 2014 jwu Set default in accordance with those in "Product"
*
* </pre>
*
@ -27,31 +28,31 @@ import com.raytheon.uf.common.serialization.comm.IServerRequest;
public class ActivityInfo implements IServerRequest {

    @DynamicSerializeElement
-   private String activityName = "defaultName";
+   private String activityName = "Default";

    @DynamicSerializeElement
-   private String activityType = "defaultType";
+   private String activityType = "Default";

    @DynamicSerializeElement
-   private String activitySubtype = "defaultSubtype";
+   private String activitySubtype = "";

    @DynamicSerializeElement
-   private String activityLabel = "defaultLabel";
+   private String activityLabel = "Default";

    @DynamicSerializeElement
-   private String site = "site1";
+   private String site = "";

    @DynamicSerializeElement
-   private String desk = "desk1";
+   private String desk = "";

    @DynamicSerializeElement
-   private String forecaster = "person1";
+   private String forecaster = "";

    @DynamicSerializeElement
    private Calendar refTime = Calendar.getInstance();

    @DynamicSerializeElement
-   private String mode = "Unknown";
+   private String mode = "OPERATIONAL";

    @DynamicSerializeElement
    private String status = "Unknown";
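The change above swaps the placeholder defaults ("defaultName", "site1", "person1", "Unknown") for the values used in "Product" ("Default", empty strings, "OPERATIONAL"). A small standalone sketch, not the actual ActivityInfo class, showing how such field initializers act as defaults for callers that never set the fields:

// Sketch only -- mirrors the idea of the new defaults, not the real ActivityInfo.
public class ActivityInfoDefaultsSketch {

    private String activityName = "Default";  // was "defaultName"
    private String site = "";                 // was "site1"
    private String mode = "OPERATIONAL";      // was "Unknown"

    public String getActivityName() {
        return activityName;
    }

    public String getMode() {
        return mode;
    }

    public static void main(String[] args) {
        // A freshly constructed instance already carries the new defaults,
        // so requests that never set these fields now serialize sensible values.
        ActivityInfoDefaultsSketch info = new ActivityInfoDefaultsSketch();
        System.out.println(info.getActivityName() + " / " + info.getMode());
    }
}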
@ -34,6 +34,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* Jun 26, 2013 bhebbard Added SequenceGenerator annotation
* Jul 22, 2013 1977 rjpeter Added getDataURI and annotations.
* Aug 30, 2013 2298 rjpeter Make getPluginName abstract
+ * Jan 29, 2014 1105 jwu Added ACTIVITY_SUBTYPE
* </pre>
*
* @author sgilbert
@ -53,6 +54,8 @@ public class PgenRecord extends PersistablePluginDataObject {

    public static final String ACTIVITY_TYPE = "activityType";

+   public static final String ACTIVITY_SUBTYPE = "activitySubtype";
+
    public static final String ACTIVITY_LABEL = "activityLabel";

    public static final String ACTIVITY_NAME = "activityName";
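The new ACTIVITY_SUBTYPE constant joins the other metadata column names on PgenRecord. As a hedged illustration only (the actual query/request API is not part of this diff), such constants are typically used as keys when assembling metadata constraints; the plain Map and the sample values below are stand-ins:

import java.util.HashMap;
import java.util.Map;

// Illustration only: constants like PgenRecord.ACTIVITY_SUBTYPE used as metadata keys.
public class PgenQuerySketch {

    // Copies of the column-name constants shown in the diff above.
    public static final String ACTIVITY_TYPE = "activityType";
    public static final String ACTIVITY_SUBTYPE = "activitySubtype";

    public static void main(String[] args) {
        // The map stands in for whatever constraint object a real client would build.
        Map<String, String> constraints = new HashMap<>();
        constraints.put(ACTIVITY_TYPE, "Volcano");      // sample values, not from the diff
        constraints.put(ACTIVITY_SUBTYPE, "Advisory");
        System.out.println(constraints);
    }
}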
@ -17,92 +17,124 @@ import java.util.List;
import java.util.Set;

import com.raytheon.uf.common.localization.LocalizationContext;
-import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
+import com.raytheon.uf.common.localization.LocalizationFile;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Polygon;

/**
- * Interface that contains all methods to load NCEP static data.
- * This interface is also used to look up the data provider service in client side, such as PGEN.
+ * Interface that contains all methods to load NCEP static data. This interface
+ * is also used to look up the data provider service in client side, such as
+ * PGEN.
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 02/12 ? B. Yin Initial Creation.
* 05/12 734 J. Zeng add getAllRfcs() and getAllCwas()
* 08/12 #770 Q. Zhou added loadContWatchNum()
* 09/12 ? B. Yin Changed county cluster return type.
* 01/13 #966 B. Yin Added methods to load bounds into.
+ * 03/14 Trac 1112 S. Russell Added getActiveCounties()
* </pre>
*
* @author B. Yin
*/
public interface IStaticDataProvider {
|
||||
public StationTable getSfStnTbl();
|
||||
public StationTable getAnchorTbl();
|
||||
public StationTable getVorTbl();
|
||||
public StationTable getVolcanoTbl();
|
||||
public HashMap<String,Set<String>> getClstTbl();
|
||||
public List<SPCCounty> getSPCCounties();
|
||||
public SPCCounty findCounty( String fips);
|
||||
public List<SPCCounty> getCountiesInGeometry(Geometry geo );
|
||||
|
||||
public List<USState> getAllstates();
|
||||
public List<USState> loadStateTable();
|
||||
public ArrayList<USState> statesInGeometry(Geometry geo);
|
||||
|
||||
public List<Rfc> getAllRfcs();
|
||||
public List<Rfc> loadRfcTable();
|
||||
public ArrayList<Rfc> rfcsInGeometry(Geometry geo);
|
||||
public boolean isRfcLoaded();
|
||||
|
||||
public List<Cwa> getAllCwas();
|
||||
public List<Cwa> loadCwaTable();
|
||||
public ArrayList<Cwa> cwasInGeometry(Geometry geo);
|
||||
|
||||
public HashMap<String, String> getStateAbrvMap();
|
||||
|
||||
//localization
|
||||
public String getPgenLocalizationRoot();
|
||||
public String getFileAbsolutePath(String fileLoczlizationPath);
|
||||
public File getFile(String fileLoczlizationPath);
|
||||
public LocalizationFile getStaticLocalizationFile( String fileName );
|
||||
public LocalizationFile getLocalizationFile( LocalizationContext context, String fileName );
|
||||
public StationTable getSfStnTbl();
|
||||
|
||||
public LocalizationContext getLocalizationContext( LocalizationType type, LocalizationLevel level);
|
||||
|
||||
public File getStaticFile( String fname );
|
||||
public File getGeogFile();
|
||||
public File getSfcStnFile();
|
||||
public File getFirBoundsFile();
|
||||
public StationTable getAnchorTbl();
|
||||
|
||||
//This is for sigmet
|
||||
public List<Object[]> queryNcepDB(String field, String table);
|
||||
|
||||
//for TCA
|
||||
public HashMap<String, String> getZoneMap();
|
||||
public StationTable getVorTbl();
|
||||
|
||||
//for GFA
|
||||
public List<FAArea> getFAAreas();
|
||||
public List<FAArea> getFAAreaX();
|
||||
public List<FARegion> getFARegions();
|
||||
public List<GreatLake> getGreatLakes();
|
||||
public List<CostalWater> getCostalWaters();
|
||||
public StationTable getVolcanoTbl();
|
||||
|
||||
//for g2g
|
||||
public ArrayList<MultiPolygon> getG2GBounds(String tableAlias, String columnName, String columnValue);
|
||||
public HashMap<String, Set<String>> getClstTbl();
|
||||
|
||||
//for continuing watches
|
||||
public List<String> loadContWatchNum();
|
||||
|
||||
//for clipping bounds
|
||||
public List<String> getBoundsTableList();
|
||||
public List<String> getBoundsNames( String table );
|
||||
public Polygon loadBounds( String boundsTable, String boundsName );
|
||||
public List<SPCCounty> getSPCCounties();
|
||||
|
||||
public SPCCounty findCounty(String fips);
|
||||
|
||||
public List<SPCCounty> getCountiesInGeometry(Geometry geo);
|
||||
|
||||
public List<USState> getAllstates();
|
||||
|
||||
public List<USState> loadStateTable();
|
||||
|
||||
public ArrayList<USState> statesInGeometry(Geometry geo);
|
||||
|
||||
public List<Rfc> getAllRfcs();
|
||||
|
||||
public List<Rfc> loadRfcTable();
|
||||
|
||||
public ArrayList<Rfc> rfcsInGeometry(Geometry geo);
|
||||
|
||||
public boolean isRfcLoaded();
|
||||
|
||||
public List<Cwa> getAllCwas();
|
||||
|
||||
public List<Cwa> loadCwaTable();
|
||||
|
||||
public ArrayList<Cwa> cwasInGeometry(Geometry geo);
|
||||
|
||||
public HashMap<String, String> getStateAbrvMap();
|
||||
|
||||
// TRAC 1112
|
||||
public List<String> getActiveCounties(int watchNum, List<SPCCounty> oldCountyList);
|
||||
|
||||
// localization
|
||||
public String getPgenLocalizationRoot();
|
||||
|
||||
public String getFileAbsolutePath(String fileLoczlizationPath);
|
||||
|
||||
public File getFile(String fileLoczlizationPath);
|
||||
|
||||
public LocalizationFile getStaticLocalizationFile(String fileName);
|
||||
|
||||
public LocalizationFile getLocalizationFile(LocalizationContext context, String fileName);
|
||||
|
||||
public LocalizationContext getLocalizationContext(LocalizationType type, LocalizationLevel level);
|
||||
|
||||
public File getStaticFile(String fname);
|
||||
|
||||
public File getGeogFile();
|
||||
|
||||
public File getSfcStnFile();
|
||||
|
||||
public File getFirBoundsFile();
|
||||
|
||||
// This is for sigmet
|
||||
public List<Object[]> queryNcepDB(String field, String table);
|
||||
|
||||
// for TCA
|
||||
public HashMap<String, String> getZoneMap();
|
||||
|
||||
// for GFA
|
||||
public List<FAArea> getFAAreas();
|
||||
|
||||
public List<FAArea> getFAAreaX();
|
||||
|
||||
public List<FARegion> getFARegions();
|
||||
|
||||
public List<GreatLake> getGreatLakes();
|
||||
|
||||
public List<CostalWater> getCostalWaters();
|
||||
|
||||
// for g2g
|
||||
public ArrayList<MultiPolygon> getG2GBounds(String tableAlias, String columnName, String columnValue);
|
||||
|
||||
// for continuing watches
|
||||
public List<String> loadContWatchNum();
|
||||
|
||||
// for clipping bounds
|
||||
public List<String> getBoundsTableList();
|
||||
|
||||
public List<String> getBoundsNames(String table);
|
||||
|
||||
public Polygon loadBounds(String boundsTable, String boundsName);
|
||||
|
||||
}
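A hedged sketch of how a client might consume such a provider: it defines a cut-down stand-in for two of the IStaticDataProvider methods above (plus a tiny SPCCounty stand-in) and filters county FIPS codes inside a JTS geometry. The lookup/injection mechanism and the sample data are assumptions, not taken from this diff:

import java.util.List;
import java.util.stream.Collectors;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;

// Sketch only: a reduced stand-in interface, not the real IStaticDataProvider.
public class StaticDataClientSketch {

    interface CountyProvider {
        List<SPCCounty> getCountiesInGeometry(Geometry geo);
    }

    static class SPCCounty {
        final String fips;

        SPCCounty(String fips) {
            this.fips = fips;
        }

        String getFips() {
            return fips;
        }
    }

    static List<String> fipsInside(CountyProvider provider, Geometry area) {
        // Delegate the spatial filtering to the provider, then keep only the FIPS codes.
        return provider.getCountiesInGeometry(area).stream()
                .map(SPCCounty::getFips)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        GeometryFactory gf = new GeometryFactory();
        Geometry area = gf.createPoint(new Coordinate(-97.0, 35.0)).buffer(1.0); // sample search area
        CountyProvider provider = geo -> List.of(new SPCCounty("40109"), new SPCCounty("40027"));
        System.out.println(fipsInside(provider, area)); // [40109, 40027]
    }
}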
File diff suppressed because it is too large.
@ -1,15 +1,12 @@
package gov.noaa.nws.ncep.edex.common.metparameters;

import java.text.ParseException;

import javax.measure.unit.SI;
import javax.measure.unit.Unit;
import javax.measure.unit.UnitFormat;
+import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

import com.raytheon.uf.common.serialization.ISerializableObject;
import com.raytheon.uf.common.serialization.adapters.UnitAdapter;
@ -17,7 +14,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

/**
* Class used to hold a value and its units.
*
* <pre>
* SOFTWARE HISTORY
@ -27,6 +24,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* 10/16/2011 G Hull make sure types of missing data and value are the same;
* make value private
* 11/14/2011 B Hebbard Resolve unit serialization issues
+ * 04/01/2014 #1040 B Hebbard In syncUnits(), map unitStr "count" to Unit.ONE
*
* </pre>
*
@ -38,84 +36,85 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
|
|||
@XmlAccessorType(XmlAccessType.NONE)
|
||||
@DynamicSerialize
|
||||
public class Amount implements ISerializableObject {
|
||||
|
||||
/**
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private static final long serialVersionUID = -7742796238434954463L;
|
||||
private static final long serialVersionUID = -7742796238434954463L;
|
||||
|
||||
// TODO : add capability to let user set their own missing data value.
|
||||
@DynamicSerializeElement
|
||||
private Number missing_data_value = new Double( -9999 );
|
||||
// TODO : add capability to let user set their own missing data value.
|
||||
@DynamicSerializeElement
|
||||
private Number missing_data_value = new Double(-9999);
|
||||
|
||||
@DynamicSerializeElement
|
||||
private Number value = missing_data_value;
|
||||
@DynamicSerializeElement
|
||||
private Number value = missing_data_value;
|
||||
|
||||
// Following should always be the string representation of below
|
||||
// "unit"; that is, these must be kept in sync. (We use the String
|
||||
// due to problems serializing the Unit<?>)
|
||||
@DynamicSerializeElement
|
||||
private String unitStr = "";
|
||||
// Following should always be the string representation of below
|
||||
// "unit"; that is, these must be kept in sync. (We use the String
|
||||
// due to problems serializing the Unit<?>)
|
||||
@DynamicSerializeElement
|
||||
private String unitStr = "";
|
||||
|
||||
private transient Unit<?> unit;
|
||||
private transient Unit<?> unit;
|
||||
|
||||
// To simplify the
|
||||
public Amount( Number val, Unit<?> unit) {
|
||||
//System.out.println("Constructor 0 called -- val " + val + " unit " + unit);
|
||||
initMissingDataSentinel();
|
||||
|
||||
setValue( val, unit );
|
||||
}
|
||||
|
||||
public Amount( Unit<?> u ) {
|
||||
//System.out.println("Constructor 1 called -- u " + u);
|
||||
setValue( missing_data_value, u );
|
||||
}
|
||||
// To simplify the
|
||||
public Amount(Number val, Unit<?> unit) {
|
||||
// System.out.println("Constructor 0 called -- val " + val + " unit " +
|
||||
// unit);
|
||||
initMissingDataSentinel();
|
||||
|
||||
public Amount(String unitStr) {
|
||||
//System.out.println("Constructor 2 called -- unitStr " + unitStr);
|
||||
try {
|
||||
Unit<?> u = new UnitAdapter().unmarshal(unitStr);
|
||||
setValue( missing_data_value, u );
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
public Amount() {
|
||||
//System.out.println("Constructor 3 called");
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
setValue(val, unit);
|
||||
}
|
||||
|
||||
// TODO Should we allow access to the value without the units? Or
|
||||
// can we let the user assume the stored units for convienience?
|
||||
public Number getValue() {
|
||||
return value;
|
||||
}
|
||||
public Amount(Unit<?> u) {
|
||||
// System.out.println("Constructor 1 called -- u " + u);
|
||||
setValue(missing_data_value, u);
|
||||
}
|
||||
|
||||
// call hasValidValue before calling this method.
|
||||
//
|
||||
public Number getValueAs( Unit<?> unitNeeded ) {
|
||||
if( unitNeeded != unit && unitNeeded.isCompatible( unit ) ) {
|
||||
double newValue = unit.getConverterTo(unitNeeded).convert( value.doubleValue() );
|
||||
return newValue;
|
||||
}
|
||||
else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
public Amount(String unitStr) {
|
||||
// System.out.println("Constructor 2 called -- unitStr " + unitStr);
|
||||
try {
|
||||
Unit<?> u = new UnitAdapter().unmarshal(unitStr);
|
||||
setValue(missing_data_value, u);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
//public Unit<?> getUnit() {
|
||||
// return unit;
|
||||
//}
|
||||
public Amount() {
|
||||
// System.out.println("Constructor 3 called");
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
public Unit<?> getUnit() {
|
||||
// TODO Should we allow access to the value without the units? Or
|
||||
// can we let the user assume the stored units for convienience?
|
||||
public Number getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
// call hasValidValue before calling this method.
|
||||
//
|
||||
public Number getValueAs(Unit<?> unitNeeded) {
|
||||
if (unitNeeded != unit && unitNeeded.isCompatible(unit)) {
|
||||
double newValue = unit.getConverterTo(unitNeeded).convert(
|
||||
value.doubleValue());
|
||||
return newValue;
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
// public Unit<?> getUnit() {
|
||||
// return unit;
|
||||
// }
|
||||
|
||||
public Unit<?> getUnit() {
|
||||
if (this.unitStr == null)
|
||||
return Unit.ONE;
|
||||
if (this.unit == null) {
|
||||
try {
|
||||
this.unit = (Unit<?>) UnitFormat.getUCUMInstance()
|
||||
.parseObject(this.unitStr);
|
||||
this.unit = (Unit<?>) UnitFormat.getUCUMInstance().parseObject(
|
||||
this.unitStr);
|
||||
} catch (ParseException e) {
|
||||
// logger.warn("ParseException while parsing unit string: "
|
||||
// + this.unit + " defaulting to unit: " + Unit.ONE);
|
||||
|
@ -125,239 +124,234 @@ public class Amount implements ISerializableObject {
|
|||
return this.unit;
|
||||
}
|
||||
|
||||
public void setValue( Amount v ) {
|
||||
//System.out.println("Amount.setValue(Amount v) called with v " + v);
|
||||
setValue( v.value, v.unit );
|
||||
}
|
||||
public void setValue(Amount v) {
|
||||
// System.out.println("Amount.setValue(Amount v) called with v " + v);
|
||||
setValue(v.value, v.unit);
|
||||
}
|
||||
|
||||
public void setValue( Number n ) {
|
||||
//System.out.println("Amount.setValue(Number n) called with n " + n + " and unit " + unit);
|
||||
//setValue( n, unit );
|
||||
value = n;
|
||||
}
|
||||
public void setValue(Number n) {
|
||||
// System.out.println("Amount.setValue(Number n) called with n " + n +
|
||||
// " and unit " + unit);
|
||||
// setValue( n, unit );
|
||||
value = n;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the missing_data_value
|
||||
*/
|
||||
public final Number getMissing_data_value() {
|
||||
return missing_data_value;
|
||||
}
|
||||
/**
|
||||
* @return the missing_data_value
|
||||
*/
|
||||
public final Number getMissing_data_value() {
|
||||
return missing_data_value;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param missing_data_value the missing_data_value to set
|
||||
*/
|
||||
public final void setMissing_data_value(Number missing_data_value) {
|
||||
this.missing_data_value = missing_data_value;
|
||||
}
|
||||
/**
|
||||
* @param missing_data_value
|
||||
* the missing_data_value to set
|
||||
*/
|
||||
public final void setMissing_data_value(Number missing_data_value) {
|
||||
this.missing_data_value = missing_data_value;
|
||||
}
|
||||
|
||||
public void setValueAs(Number n, String unitStr) {
|
||||
try {
|
||||
Unit<?> u = new UnitAdapter().unmarshal(unitStr);
|
||||
setValue(n, u);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
public void setValue(Number n, Unit<?> u) {
|
||||
value = n;
|
||||
// unit = u;
|
||||
setUnitPair(u);
|
||||
|
||||
public void setValueAs ( Number n, String unitStr ) {
|
||||
try {
|
||||
Unit<?> u = new UnitAdapter().unmarshal(unitStr);
|
||||
setValue( n, u );
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
public void setValue( Number n, Unit<?> u ) {
|
||||
value = n;
|
||||
//unit = u;
|
||||
setUnitPair(u);
|
||||
// to make the hasValidValue method simpler, ensure that the types
|
||||
// of the value and missing data value are the same.
|
||||
// (wonder if there is a nicer way to do this for all Number types?)
|
||||
if (!value.getClass().equals(missing_data_value.getClass())) {
|
||||
if (value instanceof Double) {
|
||||
missing_data_value = new Double(
|
||||
missing_data_value.doubleValue());
|
||||
} else if (value instanceof Float) {
|
||||
missing_data_value = new Float(missing_data_value.floatValue());
|
||||
} else if (value instanceof Short) {
|
||||
missing_data_value = new Short(missing_data_value.shortValue());
|
||||
} else if (value instanceof Integer) {
|
||||
missing_data_value = new Integer(missing_data_value.intValue());
|
||||
} else if (value instanceof Long) {
|
||||
missing_data_value = new Long(missing_data_value.longValue());
|
||||
} else if (value instanceof Byte) {
|
||||
missing_data_value = new Byte(missing_data_value.byteValue());
|
||||
} else {
|
||||
System.out
|
||||
.println("Amount: missing_data_value and value are of different types?");
|
||||
}
|
||||
}
|
||||
|
||||
// to make the hasValidValue method simpler, ensure that the types
|
||||
// of the value and missing data value are the same.
|
||||
// (wonder if there is a nicer way to do this for all Number types?)
|
||||
if( !value.getClass().equals( missing_data_value.getClass() ) ) {
|
||||
if( value instanceof Double ) {
|
||||
missing_data_value = new Double( missing_data_value.doubleValue() );
|
||||
}
|
||||
else if( value instanceof Float ) {
|
||||
missing_data_value = new Float( missing_data_value.floatValue() );
|
||||
}
|
||||
else if( value instanceof Short ) {
|
||||
missing_data_value = new Short( missing_data_value.shortValue() );
|
||||
}
|
||||
else if( value instanceof Integer ) {
|
||||
missing_data_value = new Integer( missing_data_value.intValue() );
|
||||
}
|
||||
else if( value instanceof Long ) {
|
||||
missing_data_value = new Long( missing_data_value.longValue() );
|
||||
}
|
||||
else if( value instanceof Byte ) {
|
||||
missing_data_value = new Byte( missing_data_value.byteValue() );
|
||||
}
|
||||
else {
|
||||
System.out.println("Amount: missing_data_value and value are of different types?");
|
||||
}
|
||||
}
|
||||
|
||||
// if( hasValidValue() ) {
|
||||
// System.out.println( getClass().getName() +" has valid value "+ n.toString() );
|
||||
// }
|
||||
}
|
||||
// if( hasValidValue() ) {
|
||||
// System.out.println( getClass().getName() +" has valid value "+
|
||||
// n.toString() );
|
||||
// }
|
||||
}
|
||||
|
||||
// make sure the missing data sentinal and the stored value are of the
|
||||
// same type so that the comparisons will work.
|
||||
//
|
||||
public void initMissingDataSentinel( ) { // Number mds ) {
|
||||
// limit the number of
|
||||
if( value instanceof Double ) {
|
||||
missing_data_value = new Double( -9999 );
|
||||
}
|
||||
else if( value instanceof Float ) {
|
||||
missing_data_value = new Float( -9999 );
|
||||
}
|
||||
else if( value instanceof Integer ) {
|
||||
missing_data_value = new Integer( -9999 );
|
||||
}
|
||||
else if( value instanceof Long ) {
|
||||
missing_data_value = new Long( -9999 );
|
||||
}
|
||||
else if( value instanceof Short ) {
|
||||
missing_data_value = new Short( (short)-9999 );
|
||||
}
|
||||
else {
|
||||
System.out.println("Error: Number object in Amount is not one of the supported types: "+
|
||||
"Double, Float, Integer, Long or Short" );
|
||||
}
|
||||
}
|
||||
|
||||
public void setMissingDataSentinel( Number mds ) {
|
||||
missing_data_value = mds;
|
||||
}
|
||||
// make sure the missing data sentinal and the stored value are of the
|
||||
// same type so that the comparisons will work.
|
||||
//
|
||||
public void initMissingDataSentinel() { // Number mds ) {
|
||||
// limit the number of
|
||||
if (value instanceof Double) {
|
||||
missing_data_value = new Double(-9999);
|
||||
} else if (value instanceof Float) {
|
||||
missing_data_value = new Float(-9999);
|
||||
} else if (value instanceof Integer) {
|
||||
missing_data_value = new Integer(-9999);
|
||||
} else if (value instanceof Long) {
|
||||
missing_data_value = new Long(-9999);
|
||||
} else if (value instanceof Short) {
|
||||
missing_data_value = new Short((short) -9999);
|
||||
} else {
|
||||
System.out
|
||||
.println("Error: Number object in Amount is not one of the supported types: "
|
||||
+ "Double, Float, Integer, Long or Short");
|
||||
}
|
||||
}
|
||||
|
||||
public Number getMissingValueSentinel() {
|
||||
return missing_data_value;
|
||||
}
|
||||
|
||||
// convenience method used by PRLibrary.
|
||||
// TODO : replace calls with getValue().doubleValue
|
||||
public double doubleValue() {
|
||||
return value.doubleValue();
|
||||
}
|
||||
public void setMissingDataSentinel(Number mds) {
|
||||
missing_data_value = mds;
|
||||
}
|
||||
|
||||
public boolean hasValidValue() {
|
||||
if (unit == null) {
|
||||
return false;
|
||||
}
|
||||
if( !value.getClass().equals(missing_data_value.getClass() ) ) {
|
||||
//TODO: Do we care? See a lot of Double/Float differences
|
||||
//System.out.println("value and missingDataValue are of different types.");
|
||||
}
|
||||
if( value == null ||
|
||||
value.equals( missing_data_value ) ) {
|
||||
//|| value.doubleValue() <= missing_data_value.doubleValue()) ) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public void setValueToMissing( ) {
|
||||
value = missing_data_value;
|
||||
}
|
||||
public Number getMissingValueSentinel() {
|
||||
return missing_data_value;
|
||||
}
|
||||
|
||||
public void setUnit( Unit<?> u ) {
|
||||
// Set the unit field only, but not the associated string.
|
||||
/*DEBUG
|
||||
String oldUnitStr = (unitStr == null ? "NULL" : UnitFormat.getUCUMInstance().format(unit));
|
||||
String newUnitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
if (oldUnitStr.equals("Pa") && newUnitStr.equals("hPa")) {
|
||||
System.out.println("In setUnit for... " + this.getClass());
|
||||
System.out.println("...changing from Pa to hPa...");
|
||||
}
|
||||
if (oldUnitStr.equals("hPa") && newUnitStr.equals("Pa")) {
|
||||
System.out.println("In setUnit for... " + this.getClass());
|
||||
System.out.println("...changing from hPa to Pa...");
|
||||
}
|
||||
DEBUG*/
|
||||
unit = u;
|
||||
}
|
||||
// convenience method used by PRLibrary.
|
||||
// TODO : replace calls with getValue().doubleValue
|
||||
public double doubleValue() {
|
||||
return value.doubleValue();
|
||||
}
|
||||
|
||||
public void setUnitPair( Unit<?> u ) {
|
||||
// Set the unit object and string, but do not convert existing value.
|
||||
unit = u;
|
||||
/*DEBUG
|
||||
String newUnitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
if (unitStr.equals("Pa") && newUnitStr.equals("hPa")) {
|
||||
System.out.println("In setUnitPair for... " + this.getClass());
|
||||
System.out.println("...changing from Pa to hPa...");
|
||||
}
|
||||
if (unitStr.equals("hPa") && newUnitStr.equals("Pa")) {
|
||||
System.out.println("In setUnitPair for... " + this.getClass());
|
||||
System.out.println("...changing from hPa to Pa...");
|
||||
}
|
||||
unitStr = newUnitStr;
|
||||
DEBUG*/
|
||||
unitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
}
|
||||
public boolean hasValidValue() {
|
||||
if (unit == null) {
|
||||
return false;
|
||||
}
|
||||
if (!value.getClass().equals(missing_data_value.getClass())) {
|
||||
// TODO: Do we care? See a lot of Double/Float differences
|
||||
// System.out.println("value and missingDataValue are of different types.");
|
||||
}
|
||||
if (value == null || value.equals(missing_data_value)) {
|
||||
// || value.doubleValue() <= missing_data_value.doubleValue()) ) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public void setUnitPairAndConvertValue( Unit<?> u ) {
|
||||
// Set the unit object and string, and convert an existing value to
|
||||
// the new unit
|
||||
if( hasValidValue() ) {
|
||||
value = getValueAs( u );
|
||||
}
|
||||
|
||||
unit = u;
|
||||
/*DEBUG
|
||||
String newUnitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
if (unitStr.equals("Pa") && newUnitStr.equals("hPa")) {
|
||||
System.out.println("In setUnitPairAndConvertValue for... " + this.getClass());
|
||||
System.out.println("...changing from Pa to hPa...");
|
||||
}
|
||||
if (unitStr.equals("hPa") && newUnitStr.equals("Pa")) {
|
||||
System.out.println("In setUnitPairAndConvertValue for... " + this.getClass());
|
||||
System.out.println("...changing from hPa to Pa...");
|
||||
}
|
||||
unitStr = newUnitStr;
|
||||
DEBUG*/
|
||||
unitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
}
|
||||
|
||||
public void syncUnits() {
|
||||
Unit<?> u;
|
||||
try {
|
||||
u = new UnitAdapter().unmarshal(unitStr);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
//System.out.println("Amount.syncUnits(): 'unitStr' invalid!!");
|
||||
return;
|
||||
}
|
||||
if (unit == null) {
|
||||
//System.out.println("Amount.syncUnits(): 'unit' NULL, using unitStr value " + unitStr);
|
||||
unit = u;
|
||||
}
|
||||
else if (!unit.equals(u)) {
|
||||
//System.out.println("Amount.syncUnits(): 'unit' is " + u + ", but unitStr is " + unitStr + " using the latter");
|
||||
unit = u;
|
||||
}
|
||||
else {
|
||||
//System.out.println("Amount.syncUnits(): Good! 'unit' is " + u + ", and unitStr is " + unitStr);
|
||||
}
|
||||
}
|
||||
public void setValueToMissing() {
|
||||
value = missing_data_value;
|
||||
}
|
||||
|
||||
public String getUnitStr() {
|
||||
return unitStr;
|
||||
}
|
||||
public void setUnit(Unit<?> u) {
|
||||
// Set the unit field only, but not the associated string.
|
||||
/*
|
||||
* DEBUG String oldUnitStr = (unitStr == null ? "NULL" :
|
||||
* UnitFormat.getUCUMInstance().format(unit)); String newUnitStr =
|
||||
* UnitFormat.getUCUMInstance().format(u); if (oldUnitStr.equals("Pa")
|
||||
* && newUnitStr.equals("hPa")) {
|
||||
* System.out.println("In setUnit for... " + this.getClass());
|
||||
* System.out.println("...changing from Pa to hPa..."); } if
|
||||
* (oldUnitStr.equals("hPa") && newUnitStr.equals("Pa")) {
|
||||
* System.out.println("In setUnit for... " + this.getClass());
|
||||
* System.out.println("...changing from hPa to Pa..."); } DEBUG
|
||||
*/
|
||||
unit = u;
|
||||
}
|
||||
|
||||
public void setUnitStr(String unitStr) {
|
||||
this.unitStr = unitStr;
|
||||
}
|
||||
public void setUnitPair(Unit<?> u) {
|
||||
// Set the unit object and string, but do not convert existing value.
|
||||
unit = u;
|
||||
/*
|
||||
* DEBUG String newUnitStr = UnitFormat.getUCUMInstance().format(u); if
|
||||
* (unitStr.equals("Pa") && newUnitStr.equals("hPa")) {
|
||||
* System.out.println("In setUnitPair for... " + this.getClass());
|
||||
* System.out.println("...changing from Pa to hPa..."); } if
|
||||
* (unitStr.equals("hPa") && newUnitStr.equals("Pa")) {
|
||||
* System.out.println("In setUnitPair for... " + this.getClass());
|
||||
* System.out.println("...changing from hPa to Pa..."); } unitStr =
|
||||
* newUnitStr; DEBUG
|
||||
*/
|
||||
unitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
}
|
||||
|
||||
// // convert the current value to
|
||||
// public void changeUnits( Unit<?> u ) {
|
||||
// if( value != MISSING_DATA_VALUE ) {
|
||||
// unit = u;
|
||||
// }
|
||||
// else {
|
||||
//
|
||||
// }
|
||||
// }
|
||||
// TODO : Do we need to worry about rounding errors here?
|
||||
// public Boolean isMissingValue( ) {
|
||||
// return value.doubleValue() == MISSING_DATA_VALUE;
|
||||
// }
|
||||
public void setUnitPairAndConvertValue(Unit<?> u) {
|
||||
// Set the unit object and string, and convert an existing value to
|
||||
// the new unit
|
||||
if (hasValidValue()) {
|
||||
value = getValueAs(u);
|
||||
}
|
||||
|
||||
unit = u;
|
||||
/*
|
||||
* DEBUG String newUnitStr = UnitFormat.getUCUMInstance().format(u); if
|
||||
* (unitStr.equals("Pa") && newUnitStr.equals("hPa")) {
|
||||
* System.out.println("In setUnitPairAndConvertValue for... " +
|
||||
* this.getClass());
|
||||
* System.out.println("...changing from Pa to hPa..."); } if
|
||||
* (unitStr.equals("hPa") && newUnitStr.equals("Pa")) {
|
||||
* System.out.println("In setUnitPairAndConvertValue for... " +
|
||||
* this.getClass());
|
||||
* System.out.println("...changing from hPa to Pa..."); } unitStr =
|
||||
* newUnitStr; DEBUG
|
||||
*/
|
||||
unitStr = UnitFormat.getUCUMInstance().format(u);
|
||||
}
|
||||
|
||||
public void syncUnits() {
|
||||
Unit<?> u;
|
||||
if (unitStr.equals("count")) {
|
||||
u = Unit.ONE;
|
||||
} else {
|
||||
try {
|
||||
u = new UnitAdapter().unmarshal(unitStr);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
// System.out.println("Amount.syncUnits(): 'unitStr' string '"
|
||||
// + unitStr + "' invalid!!");
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (unit == null) {
|
||||
// System.out.println("Amount.syncUnits(): 'unit' NULL, using unitStr value "
|
||||
// + unitStr);
|
||||
unit = u;
|
||||
} else if (!unit.equals(u)) {
|
||||
// System.out.println("Amount.syncUnits(): 'unit' is " + u +
|
||||
// ", but unitStr is " + unitStr + " using the latter");
|
||||
unit = u;
|
||||
} else {
|
||||
// System.out.println("Amount.syncUnits(): Good! 'unit' is " + u +
|
||||
// ", and unitStr is " + unitStr);
|
||||
}
|
||||
}
|
||||
|
||||
public String getUnitStr() {
|
||||
return unitStr;
|
||||
}
|
||||
|
||||
public void setUnitStr(String unitStr) {
|
||||
this.unitStr = unitStr;
|
||||
}
|
||||
|
||||
// // convert the current value to
|
||||
// public void changeUnits( Unit<?> u ) {
|
||||
// if( value != MISSING_DATA_VALUE ) {
|
||||
// unit = u;
|
||||
// }
|
||||
// else {
|
||||
//
|
||||
// }
|
||||
// }
|
||||
// TODO : Do we need to worry about rounding errors here?
|
||||
// public Boolean isMissingValue( ) {
|
||||
// return value.doubleValue() == MISSING_DATA_VALUE;
|
||||
// }
|
||||
}
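A short usage sketch for the Amount class above, based only on the constructors and methods visible in this diff: construct with a value and a unit, check validity, and convert on read with getValueAs(). The surrounding setup (classpath for the NCEP common plugin and the JSR-275 javax.measure units) is assumed:

import javax.measure.unit.SI;

import gov.noaa.nws.ncep.edex.common.metparameters.Amount;

// Usage sketch for Amount; method names come from the class shown in the diff.
public class AmountUsageSketch {

    public static void main(String[] args) {
        // 1013.25 hPa stored as pascals.
        Amount pressure = new Amount(Double.valueOf(101325.0), SI.PASCAL);

        if (pressure.hasValidValue()) {
            // Convert on read; the stored value and unit are left untouched.
            Number hPa = pressure.getValueAs(SI.HECTO(SI.PASCAL));
            System.out.println(hPa + " hPa");
        }

        // A bare Amount(unit) starts at the missing-data sentinel (-9999),
        // so hasValidValue() is false until a value is set.
        Amount empty = new Amount(SI.KELVIN);
        System.out.println(empty.hasValidValue()); // false
    }
}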
Some files were not shown because too many files have changed in this diff.