From 586255516067e8bdd4561694cd07980a709ab4ff Mon Sep 17 00:00:00 2001
From: Steve Harris
Date: Mon, 6 Jan 2014 14:02:30 -0500
Subject: [PATCH] 14.1.1-14 baseline

Former-commit-id: 6df7c67f95f305f605d34911b1fafb6465e02c33 [formerly f2ad9da4eb6388bf3734fa204377fb17236b72ae] [formerly 6df7c67f95f305f605d34911b1fafb6465e02c33 [formerly f2ad9da4eb6388bf3734fa204377fb17236b72ae] [formerly 9ef1c841b931ef87c9417a846c3d3139e0ce8d40 [formerly 46bdb43889bdc681fec2aba45004a1583caaa6ae]]]
Former-commit-id: 9ef1c841b931ef87c9417a846c3d3139e0ce8d40
Former-commit-id: 0989c177103a15ecae1e39cb072503b7963d207b [formerly 43cc501e0d731ddbc60510005843a2b9abdbd6e7]
Former-commit-id: a2bdee5cc182a220339f3567648065860f7fd465
---
 cave/.dm/.dirs.dmdb                                | Bin 16640 -> 0 bytes
 cave/build/static/linux/cave/caveUtil.sh           |   2 +-
 .../raytheon/uf/viz/archive/data/SizeJob.java      |  51 +-
 .../uf/viz/archive/ui/AbstractArchiveDlg.java      |  23 +-
 .../.dm/.dirs.dmdb                                 | Bin 16640 -> 0 bytes
 .../src/.dm/.dirs.dmdb                             | Bin 16640 -> 0 bytes
 .../src/com/.dm/.dirs.dmdb                         | Bin 16640 -> 0 bytes
 .../src/com/raytheon/.dm/.dirs.dmdb                | Bin 16640 -> 0 bytes
 .../src/com/raytheon/uf/.dm/.dirs.dmdb             | Bin 16640 -> 0 bytes
 .../src/com/raytheon/uf/viz/.dm/.dirs.dmdb         | Bin 16640 -> 0 bytes
 .../raytheon/uf/viz/monitor/.dm/.dirs.dmdb         | Bin 16640 -> 0 bytes
 .../uf/viz/monitor/scan/.dm/.dirs.dmdb             | Bin 16640 -> 0 bytes
 .../scan/commondialogs/.dm/.items.dmdb             | Bin 16640 -> 0 bytes
 .../viz/monitor/scan/resource/.dm/.items.dmdb      | Bin 16640 -> 0 bytes
 .../viz/monitor/scan/tables/.dm/.items.dmdb        | Bin 16640 -> 0 bytes
 .../rsc/CrossSectionImageResource.java             |  11 +-
 .../rsc/CrossSectionVectorResource.java            |   3 +-
 .../rsc/TimeHeightImageResource.java               |  10 +-
 .../META-INF/MANIFEST.MF                           |   3 +-
 .../xy/timeseries/rsc/TimeSeriesResource.java      |   6 +-
 .../util/TimeSeriesZoomHandler.java                |  36 +-
 .../META-INF/MANIFEST.MF                           |   3 +-
 .../xy/varheight/rsc/VarHeightResource.java        |  11 +-
 .../varheight/util/VarHeightZoomHandler.java       |  24 +-
 .../src/com/raytheon/viz/gfe/Activator.java        |   6 +-
 .../com/raytheon/viz/gfe/GridManagerView.java      |  21 +-
 .../viz/gfe/actions/RunProcedureAction.java        |  13 +-
 .../raytheon/viz/gfe/core/DataManager.java         |  38 +
 .../viz/gfe/dialogs/KillJobsOnExitDialog.java      |  18 +-
 .../perspective/GFEPerspectiveManager.java         |  13 +-
 .../viz/gfe/procedures/ProcedureJob.java           | 449 -----------
 .../viz/gfe/procedures/ProcedureJobPool.java       | 432 ++++++++++
 .../gfe/procedures/ProcedureSelectionDlg.java      |   6 +-
 .../viz/gfe/procedures/ProcedureUtil.java          |   7 +-
 .../com/raytheon/viz/gfe/rsc/GFEResource.java      |   7 +-
 .../raytheon/viz/gfe/smarttool/SmartUtil.java      |  18 +-
 .../gfe/smarttool/script/SmartToolJob.java         | 378 ---------
 .../smarttool/script/SmartToolJobPool.java         | 377 +++++++++
 .../script/SmartToolSelectionDlg.java              |  17 +-
 .../viz/grid/rsc/general/GeneralGridData.java      |  38 +-
 .../viz/radar/ui/xy/RadarXsectXYResource.java      |   7 +-
 edexOsgi/build.edex/esb/conf/modes.xml             |   8 +
 .../bufrua/decoder/BUFRUAManLevelAdapter.java      | 128 ++-
 .../plugin/gfe/server/GridParmManager.java         |   4 +-
 .../gfe/server/database/IFPGridDatabase.java       |  10 +
 .../obs/metar/MetarPointDataTransform.java         |   3 +-
 .../res/spring/text-common.xml                     |   7 +-
 .../res/spring/text-ingest.xml                     |   9 +-
 .../com.raytheon.edex.text.properties              |   5 +
 .../edex/plugin/text/dao/TextDao.java              |  30 +-
 .../TextArchiveFileNameFormatter.java              |  82 +-
 .../archive/config/ArchiveConfigManager.java       | 415 +++++++---
 .../archive/config/CategoryDataSet.java            |  59 +-
 .../config/CategoryFileDateHelper.java             |  94 ++-
 .../common/archive/config/DataSetStatus.java       | 128 +++
 .../common/archive/config/FileDateFilter.java      | 127 ---
 .../archive/config/IFileDateHelper.java            |   6 +-
 .../common/archive/config/SelectConfig.java        |  13 +-
 .../archive/config/select/CategorySelect.java      |  21 +-
 .../res/spring/archive-spring.xml                  |   4 +-
 ...m.raytheon.uf.edex.archive.cron.properties      |   9 +-
 .../uf/edex/archive/DataArchiver.java              |  11 +-
 .../archive/DatabaseArchiveProcessor.java          | 739 ++++++++++++++++++
 .../uf/edex/archive/DatabaseArchiver.java          | 348 ++-------
 ...DefaultPluginArchiveFileNameFormatter.java      | 121 +--
 .../IPluginArchiveFileNameFormatter.java           |  37 +-
 .../uf/edex/archive/purge/ArchivePurger.java       |   2 +
 .../base/archiver/purger/PROCESSED_DATA.xml        |  54 +-
 .../base/archiver/purger/RAW_DATA.xml              |  12 +-
 .../META-INF/MANIFEST.MF                           |   1 +
 .../database/cluster/ClusterLockUtils.java         | 123 ++-
 .../uf/edex/database/dao/CoreDao.java              | 136 +++-
 .../uf/edex/database/plugin/PluginDao.java         | 102 +--
 .../processor/IDatabaseProcessor.java              |  69 ++
 .../base/dissemination/handleOUP.py                |   2 +-
 .../res/spring/DPADecoder-spring.xml               |   5 -
 .../res/spring/hpeDHRDecoder-spring.xml            |   5 -
 .../res/spring/ohd-common.xml                      |  17 +-
 .../res/spring/aww-common.xml                      |   5 +
 .../res/spring/nctext-common.xml                   |   6 +-
 .../res/spring/nctext-ingest.xml                   |   5 -
 81 files changed, 3014 insertions(+), 1976 deletions(-)
 delete mode 100644 cave/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/.dm/.dirs.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/commondialogs/.dm/.items.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/resource/.dm/.items.dmdb
 delete mode 100644 cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/tables/.dm/.items.dmdb
 delete mode 100644 cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJob.java
 create mode 100644 cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJobPool.java
 delete mode 100644 cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJob.java
 create mode 100644 cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJobPool.java
 create mode 100644 edexOsgi/com.raytheon.edex.plugin.text/resources/com.raytheon.edex.text.properties
 create mode 100644 edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/DataSetStatus.java
 delete mode 100644 edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/FileDateFilter.java
 create mode 100644 edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java
 create mode 100644 edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java

diff --git a/cave/.dm/.dirs.dmdb b/cave/.dm/.dirs.dmdb
deleted file mode 100644
index 730698d65cf7fdd3ff38baba67d61b7734685385..0000000000000000000000000000000000000000
Binary files a/cave/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/build/static/linux/cave/caveUtil.sh b/cave/build/static/linux/cave/caveUtil.sh
index 0e3d27f9da..a0bcf5ec78 100644
--- a/cave/build/static/linux/cave/caveUtil.sh
+++ b/cave/build/static/linux/cave/caveUtil.sh
@@ -120,7 +120,7 @@ function copyVizShutdownUtilIfNecessary()
 function getPidsOfMyRunningCaves()
 {
    local user=`whoami`
-   local caveProcs=`ps -ef | grep "/awips2/cave/cave " | grep -v "grep" | grep $user`
+   local caveProcs=`ps -ef | grep -E "(/awips2/cave|/usr/local/viz)/cave " | grep -v "grep" | grep $user`
 
    # preserve IFS and set it to line feed only
    local PREV_IFS=$IFS
diff --git a/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/data/SizeJob.java b/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/data/SizeJob.java
index c4c6bfdb87..f796cf72a6 100644
--- a/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/data/SizeJob.java
+++ b/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/data/SizeJob.java
@@ -4,6 +4,7 @@ import java.io.File;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -48,6 +49,8 @@ import com.raytheon.uf.common.time.util.TimeUtil;
  * Jul 24, 2013 #2220      rferrel     Change to get all data sizes only one time.
  * Aug 02, 2013 #2224      rferrel     Changes for new configuration files.
  * Aug 06, 2013 #2222      rferrel     Changes to display all selected data.
+ * Dec 11, 2013 #2603      rferrel     Selected list changed to a Set.
+ * Dec 11, 2013 #2624      rferrel     Clear display variables when recomputing sizes.
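A note on the Set change recorded in the history lines above: the selection test runs once per display label, and HashSet.contains is a constant-time probe where List.contains is a linear scan. Reduced to a self-contained sketch (the class and type names here are illustrative, not the actual SizeJob internals):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class SelectionApplier {
        /** Marks each row selected exactly when its label is in the selected set. */
        static void apply(List<Row> rows, Set<String> selectedLabels) {
            // Defensive copy so mutation of the caller's collection cannot
            // race this pass (SizeJob copies into a HashSet the same way).
            Set<String> selected = new HashSet<String>(selectedLabels);
            for (Row row : rows) {
                row.setSelected(selected.contains(row.getLabel()));
            }
        }

        interface Row {
            String getLabel();

            void setSelected(boolean selected);
        }
    }

With n rows and m selected labels this pass costs O(n) expected time instead of the O(n * m) a List would give.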
* * * @@ -231,6 +234,8 @@ public class SizeJob extends Job { */ public void recomputeSize() { clearQueue(); + displayArchive = null; + displayCategory = null; for (ArchiveInfo archiveInfo : archiveInfoMap.values()) { for (String categoryName : archiveInfo.getCategoryNames()) { CategoryInfo categoryInfo = archiveInfo.get(categoryName); @@ -300,19 +305,19 @@ public class SizeJob extends Job { for (String archiveName : getArchiveNames()) { ArchiveInfo archiveInfo = get(archiveName); for (String categoryName : archiveInfo.getCategoryNames()) { - List selectionsList = selections.getSelectedList( + Set selectionsSet = selections.getSelectedSet( archiveName, categoryName); MissingData missingData = removeMissingData(archiveName, categoryName); if (missingData != null) { - missingData.setSelectedList(selectionsList); + missingData.setSelectedSet(selectionsSet); addMissingData(missingData); } else { CategoryInfo categoryInfo = archiveInfo.get(categoryName); for (DisplayData displayData : categoryInfo .getDisplayDataList()) { String displayLabel = displayData.getDisplayLabel(); - boolean selected = selectionsList + boolean selected = selectionsSet .contains(displayLabel); if (selected != displayData.isSelected()) { setSelect(displayData, selected); @@ -506,10 +511,10 @@ public class SizeJob extends Job { visibleList = manager.getDisplayData(displayArchive, displayCategory, false); - List selectedList = selections.getSelectedList(displayArchive, + Set selectedSet = selections.getSelectedSet(displayArchive, displayCategory); for (DisplayData displayData : visibleList) { - displayData.setSelected(selectedList.contains(displayData + displayData.setSelected(selectedSet.contains(displayData .getDisplayLabel())); } @@ -528,10 +533,10 @@ public class SizeJob extends Job { schedule(); } } else { - selectedList = selections.getSelectedList(archiveName, + selectedSet = selections.getSelectedSet(archiveName, categoryName); MissingData missingData = new MissingData(archiveName, - categoryName, selectedList); + categoryName, selectedSet); missingDataQueue.add(missingData); } } @@ -658,14 +663,11 @@ public class SizeJob extends Job { break mainLoop; } - // System.out.println("+++SizeJob: " + currentDisplayData); - List files = manager.getDisplayFiles(currentDisplayData, startCal, endCal); // Size no longer needed. if (currentDisplayData != sizeQueue.peek()) { - // System.out.println("---SizeJob: " + currentDisplayData); continue mainLoop; } @@ -682,7 +684,6 @@ public class SizeJob extends Job { // Skip when size no longer needed. 
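For orientation before the stopComputeSize guard that follows: the run loop in this hunk keeps comparing its current entry against sizeQueue.peek() so a size computation is abandoned the moment it is superseded. A compact sketch of that stale-work check, assuming a single worker thread and illustrative types (this is not the SizeJob code itself):

    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.atomic.AtomicBoolean;

    class StaleAwareWorker implements Runnable {
        private final ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<String>();

        private final AtomicBoolean cancelled = new AtomicBoolean(false);

        void request(String label) {
            queue.add(label);
        }

        void cancel() {
            cancelled.set(true);
        }

        @Override
        public void run() {
            mainLoop: while (!cancelled.get()) {
                String current = queue.peek();
                if (current == null) {
                    Thread.yield(); // a real job would block or reschedule instead
                    continue;
                }
                long total = 0;
                for (int chunk = 0; chunk < 100; chunk++) {
                    total += computeChunk(current, chunk);
                    // Bail out mid-computation if another thread removed or
                    // replaced the head entry, mirroring the peek() test above.
                    if ((current != queue.peek()) || cancelled.get()) {
                        continue mainLoop;
                    }
                }
                queue.remove(current);
                publish(current, total);
            }
        }

        private long computeChunk(String label, int chunk) {
            return chunk; // stand-in for the expensive file-size work
        }

        private void publish(String label, long total) {
            System.out.println(label + " = " + total);
        }
    }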
if (stopComputeSize) { - // System.out.println("---SizeJob: " + currentDisplayData); continue mainLoop; } } @@ -692,7 +693,6 @@ public class SizeJob extends Job { displayQueue.add(currentDisplayData); } - // System.out.println("xxxSizeJob: OK_STATUS"); shutdownDisplayTimer.set(true); return Status.OK_STATUS; } @@ -748,15 +748,10 @@ public class SizeJob extends Job { displayQueue.size()); displayQueue.drainTo(list); - // for (DisplayData displayData : list) { - // System.out.println("== " + displayData); - // } - // for (IUpdateListener listener : listeners) { listener.update(list); } } else if (shutdownDisplayTimer.get()) { - // System.out.println("xxx updateDisplayTimer canceled"); displayTimer.cancel(); displayTimer = null; } @@ -773,7 +768,6 @@ public class SizeJob extends Job { */ @Override protected void canceling() { - // System.err.println("canceling SizeJob"); clearQueue(); missingDataQueue.clear(); missingDataJob.cancel(); @@ -789,28 +783,28 @@ public class SizeJob extends Job { protected final String category; - protected final List selectedList; + protected final Set selectedSet; protected boolean visiable = false; public MissingData(String archive, String category, - List selectedList) { + Set selectedSet) { this.archive = archive; this.category = category; - this.selectedList = new ArrayList(selectedList); + this.selectedSet = new HashSet(selectedSet); } public boolean isSelected() { - return !selectedList.isEmpty(); + return !selectedSet.isEmpty(); } public void setVisiable(boolean state) { this.visiable = state; } - public void setSelectedList(List selectedList) { - this.selectedList.clear(); - this.selectedList.addAll(selectedList); + public void setSelectedSet(Set selectedSet) { + this.selectedSet.clear(); + this.selectedSet.addAll(selectedSet); } @Override @@ -861,8 +855,7 @@ public class SizeJob extends Job { String archiveName = currentMissingData.archive; String categoryName = currentMissingData.category; - // System.out.println("== missingData: " + currentMissingData); - List selectedList = currentMissingData.selectedList; + Set selectedSet = currentMissingData.selectedSet; List displayDatas = manager.getDisplayData( archiveName, categoryName, false); if (shutdown.get()) { @@ -870,7 +863,7 @@ public class SizeJob extends Job { } for (DisplayData displayData : displayDatas) { - displayData.setSelected(selectedList.contains(displayData + displayData.setSelected(selectedSet.contains(displayData .getDisplayLabel())); sizeQueue.add(displayData); } @@ -883,13 +876,11 @@ public class SizeJob extends Job { } } - // System.out.println("xxx missingData"); return Status.OK_STATUS; } @Override protected void canceling() { - // System.err.println("canceling MissingDataJob"); shutdown.set(true); } } diff --git a/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/ui/AbstractArchiveDlg.java b/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/ui/AbstractArchiveDlg.java index 3bc0e0706d..1840f6293e 100644 --- a/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/ui/AbstractArchiveDlg.java +++ b/cave/com.raytheon.uf.viz.archive/src/com/raytheon/uf/viz/archive/ui/AbstractArchiveDlg.java @@ -76,6 +76,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog; * Aug 01, 2013 2221 rferrel Changes for select configuration. * Aug 06, 2013 2222 rferrel Changes to display all selected data. * Nov 14, 2013 2549 rferrel Get category data moved off the UI thread. + * Dec 11, 2013 2624 rferrel No longer clear table prior to populating. 
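The AbstractArchiveDlg hunks that follow cache the previously selected archive and category so a combo event that re-selects the current item no longer rebuilds the table. Stripped of the SWT plumbing, the guard amounts to this (illustrative class name, not the dialog's code):

    import java.util.Objects;

    class SelectionChangeGuard {
        private String prevArchive;

        private String prevCategory;

        /** True only when the (archive, category) pair actually changed. */
        boolean changed(String archive, String category) {
            if (Objects.equals(archive, prevArchive)
                    && Objects.equals(category, prevCategory)) {
                return false;
            }
            prevArchive = archive;
            prevCategory = category;
            return true;
        }
    }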
  *
  *
  * @author bgonzale
@@ -131,6 +132,10 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
     /** Which table is being displayed. */
     private boolean showingSelected = true;
 
+    private String previousSelectedArchive = null;
+
+    private String previousSelectedCategory = null;
+
     /**
      * @param parentShell
      */
@@ -386,7 +391,11 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
      * Method invoked when archive combo selection is changed.
      */
     protected void archiveComboSelection() {
-        populateCategoryCbo();
+        String selectedArchiveName = getSelectedArchiveName();
+        if (!selectedArchiveName.equals(previousSelectedArchive)) {
+            previousSelectedArchive = selectedArchiveName;
+            populateCategoryCbo();
+        }
     }
 
     /**
@@ -412,7 +421,14 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
      * Method invoked when the category combo selection is changed.
      */
     protected void categoryComboSelection() {
-        populateTableComp();
+        String archiveName = getSelectedArchiveName();
+        String categoryName = getSelectedCategoryName();
+        if (!archiveName.equals(previousSelectedArchive)
+                || !categoryName.equals(previousSelectedCategory)) {
+            previousSelectedArchive = archiveName;
+            previousSelectedCategory = categoryName;
+            populateTableComp();
+        }
     }
 
     /**
@@ -463,9 +479,6 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
         setCursorBusy(true);
 
         setShowingSelected(false);
-        tableComp.populateTable(archiveName, categoryName,
-                new ArrayList<DisplayData>(0));
-        tableComp.refresh();
 
         Job job = new Job("populate category table") {
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/.dm/.dirs.dmdb
deleted file mode 100644
index 72bab39d1ab0f15ad4a51a64e0b476842144fe82..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/.dm/.dirs.dmdb
deleted file mode 100644
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/.dm/.dirs.dmdb
deleted file mode 100644
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/.dm/.dirs.dmdb
deleted file mode 100644
index 99cdff69427e455f787dfc5160599e2dc34acc07..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/.dm/.dirs.dmdb
deleted file mode 100644
index 41940fd55c91111c0078ce30a37f92c586229c51..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/.dm/.dirs.dmdb
deleted file mode 100644
index a517d9dc7690c106e6d15735865a2b60fb520e98..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/.dm/.dirs.dmdb
deleted file mode 100644
index b1215f56d1c36a2ab5f2a3ac90ceb66f2c14250e..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/.dm/.dirs.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/.dm/.dirs.dmdb
deleted file mode 100644
index 3629c1037196383b68af23b78e36f0f0d036fd72..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/.dm/.dirs.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/commondialogs/.dm/.items.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/commondialogs/.dm/.items.dmdb
deleted file mode 100644
index 9637c565e7b0215edab7edde88ff851f155a9d5f..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/commondialogs/.dm/.items.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/resource/.dm/.items.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/resource/.dm/.items.dmdb
deleted file mode 100644
index 2b4d5e85ce4909d0cd7d44908e617b51bfb1ba90..0000000000000000000000000000000000000000
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/resource/.dm/.items.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/tables/.dm/.items.dmdb b/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/tables/.dm/.items.dmdb
deleted file mode 100644
Binary files a/cave/com.raytheon.uf.viz.monitor.scan/src/com/raytheon/uf/viz/monitor/scan/tables/.dm/.items.dmdb and /dev/null differ
diff --git a/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionImageResource.java b/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionImageResource.java
index 30a951a78a..d9e8135014 100644
--- a/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionImageResource.java
+++ b/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionImageResource.java
@@ -71,6 +71,7 @@ import com.vividsolutions.jts.geom.Coordinate;
  * ------------ ---------- ----------- --------------------------
  * Nov 29, 2007             njensen     Initial creation
  * 02/17/09                 njensen     Refactored to new rsc architecture
+ * Dec 11, 2013 DR 16795    D.
Friedman Transform pixel coordinate in inspect * * * @@ -288,13 +289,15 @@ public class CrossSectionImageResource extends AbstractCrossSectionResource IExtent extent = descriptor.getGraph(this).getExtent(); + double val = Double.NaN; - if (extent.contains(new double[] { coord.getObject().x, - coord.getObject().y })) { + double[] worldCoord = descriptor.pixelToWorld(new double[] { + coord.getObject().x, coord.getObject().y }); + if (extent.contains(worldCoord)) { try { - DirectPosition2D dp = new DirectPosition2D(coord.getObject().x, - coord.getObject().y); + DirectPosition2D dp = new DirectPosition2D(worldCoord[0], + worldCoord[1]); descriptor.getGridGeometry().getGridToCRS().transform(dp, dp); val = reproj.reprojectedGridCell(sampler, (int) dp.x, (int) dp.y); diff --git a/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionVectorResource.java b/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionVectorResource.java index 16eceac2c6..6f2873a47c 100644 --- a/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionVectorResource.java +++ b/cave/com.raytheon.uf.viz.xy.crosssection/src/com/raytheon/uf/viz/xy/crosssection/rsc/CrossSectionVectorResource.java @@ -59,6 +59,7 @@ import com.vividsolutions.jts.geom.Coordinate; * ------------ ---------- ----------- -------------------------- * Jun 15, 2010 bsteffen Initial creation * Feb 14, 2011 8244 bkowal enabled magnification capability. + * Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect * * * @@ -178,7 +179,7 @@ public class CrossSectionVectorResource extends AbstractCrossSectionResource { String s = null; Coordinate c = coord.getObject(); DataTime time = descriptor.getTimeForResource(this); - double[] values = descriptor.getGraph(this).getGridLocation(c.x, c.y); + double[] values = descriptor.pixelToWorld(new double[] { c.x, c.y }); // if geometry has not been created yet dont sample if (geometry == null) { diff --git a/cave/com.raytheon.uf.viz.xy.timeheight/src/com/raytheon/uf/viz/xy/timeheight/rsc/TimeHeightImageResource.java b/cave/com.raytheon.uf.viz.xy.timeheight/src/com/raytheon/uf/viz/xy/timeheight/rsc/TimeHeightImageResource.java index 0dee9ff574..b3bf5de7d2 100644 --- a/cave/com.raytheon.uf.viz.xy.timeheight/src/com/raytheon/uf/viz/xy/timeheight/rsc/TimeHeightImageResource.java +++ b/cave/com.raytheon.uf.viz.xy.timeheight/src/com/raytheon/uf/viz/xy/timeheight/rsc/TimeHeightImageResource.java @@ -62,6 +62,7 @@ import com.vividsolutions.jts.geom.Coordinate; * ------------ ---------- ----------- -------------------------- * Dec 4, 2007 njensen Initial creation * Feb 20, 2009 njensen Refactored to new rsc architecture + * Dec 11, 2013 DR 16795 D. 
Friedman Transform pixel coordinate in inspect * * * @@ -273,12 +274,13 @@ public class TimeHeightImageResource extends AbstractTimeHeightResource IExtent extent = descriptor.getGraph(this).getExtent(); double val = Double.NaN; - if (extent.contains(new double[] { coord.getObject().x, - coord.getObject().y })) { + double[] worldCoord = descriptor.pixelToWorld(new double[] { + coord.getObject().x, coord.getObject().y }); + if (extent.contains(worldCoord)) { try { - DirectPosition2D dp = new DirectPosition2D(coord.getObject().x, - coord.getObject().y); + DirectPosition2D dp = new DirectPosition2D(worldCoord[0], + worldCoord[1]); descriptor.getGridGeometry().getGridToCRS().transform(dp, dp); val = reproj.reprojectedGridCell(sampler, (int) dp.x, (int) dp.y); diff --git a/cave/com.raytheon.uf.viz.xy.timeseries/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.xy.timeseries/META-INF/MANIFEST.MF index 908de32e7f..3390b08b0b 100644 --- a/cave/com.raytheon.uf.viz.xy.timeseries/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.xy.timeseries/META-INF/MANIFEST.MF @@ -8,7 +8,8 @@ Bundle-Vendor: RAYTHEON Eclipse-RegisterBuddy: com.raytheon.viz.core, com.raytheon.uf.viz.core Eclipse-BuddyPolicy: ext, global Require-Bundle: org.eclipse.ui, - org.eclipse.core.runtime + org.eclipse.core.runtime, + org.geotools Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.viz.xy.timeseries, diff --git a/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/rsc/TimeSeriesResource.java b/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/rsc/TimeSeriesResource.java index 9e8e7b17d0..5388e4bae2 100644 --- a/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/rsc/TimeSeriesResource.java +++ b/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/rsc/TimeSeriesResource.java @@ -96,6 +96,7 @@ import com.vividsolutions.jts.geom.Geometry; * Feb 10, 2011 8244 bkowal enabled the magnification * capability. * Feb 14, 2011 8244 bkowal enabled magnification for wind barbs. + * Dec 19, 2013 DR 16795 D. 
Friedman Transform pixel coordinate in inspect * * * @@ -580,7 +581,10 @@ public class TimeSeriesResource extends @Override public String inspect(ReferencedCoordinate coord) throws VizException { String inspect = null; - Coordinate c = descriptor.getGraphCoordiante(this, coord.getObject()); + double[] worldCoord = descriptor.pixelToWorld( + new double[] { coord.getObject().x, coord.getObject().y }); + Coordinate c = descriptor.getGraphCoordiante(this, + new Coordinate(worldCoord[0], worldCoord[1])); if (c != null && data != null) { double[] vals = data.inspectXY(c); NumberFormat nf = NumberFormat.getInstance(); diff --git a/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/util/TimeSeriesZoomHandler.java b/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/util/TimeSeriesZoomHandler.java index e96132d4cd..02dba0e768 100644 --- a/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/util/TimeSeriesZoomHandler.java +++ b/cave/com.raytheon.uf.viz.xy.timeseries/src/com/raytheon/uf/viz/xy/timeseries/util/TimeSeriesZoomHandler.java @@ -22,7 +22,11 @@ package com.raytheon.uf.viz.xy.timeseries.util; import java.util.Stack; import org.eclipse.swt.widgets.Event; +import org.geotools.geometry.DirectPosition2D; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.viz.core.IDisplayPaneContainer; import com.raytheon.uf.viz.core.drawables.IRenderableDisplay; import com.raytheon.uf.viz.xy.AbstractGraphInputHandler; @@ -42,6 +46,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 16, 2009 mschenke Initial creation + * Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate for zoom * * * @@ -51,6 +56,9 @@ import com.vividsolutions.jts.geom.Coordinate; public class TimeSeriesZoomHandler extends AbstractGraphInputHandler { + private static final transient IUFStatusHandler statusHandler = UFStatus + .getHandler(TimeSeriesZoomHandler.class); + private MousePreferenceManager prefManager = MousePreferenceManager .getInstance(); @@ -103,7 +111,7 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler { private boolean zoomIn(int x, int y) { IDisplayPaneContainer editor = display.getContainer(); - Coordinate grid = editor.translateClick(x, y); + Coordinate grid = translateClick(x, y); if (grid == null) { return false; } @@ -129,7 +137,7 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler { private boolean zoomOut(int x, int y) { IDisplayPaneContainer editor = display.getContainer(); - Coordinate grid = editor.translateClick(x, y); + Coordinate grid = translateClick(x, y); if (grid == null) { return false; } @@ -153,4 +161,28 @@ public class TimeSeriesZoomHandler extends AbstractGraphInputHandler { return true; } + private Coordinate translateClick(int x, int y) { + IDisplayPaneContainer editor = display.getContainer(); + XyGraphDescriptor desc = (XyGraphDescriptor) editor + .getActiveDisplayPane().getDescriptor(); + Coordinate grid = editor.translateClick(x, y); + if (grid == null) { + return null; + } + /* Convert from the overall display coordinate space to the coordinate + * space for our resource. 
+ */ + DirectPosition2D dp = new DirectPosition2D(grid.x, grid.y); + try { + desc.getGridGeometry().getGridToCRS().transform(dp, dp); + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, + "Error converting coordinate", e); + } + grid.x = dp.x; + grid.y = dp.y; + grid.z = 0; + return grid; + } + } diff --git a/cave/com.raytheon.uf.viz.xy.varheight/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.xy.varheight/META-INF/MANIFEST.MF index e05c73bf24..d0137e20e0 100644 --- a/cave/com.raytheon.uf.viz.xy.varheight/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.xy.varheight/META-INF/MANIFEST.MF @@ -7,7 +7,8 @@ Bundle-Activator: com.raytheon.uf.viz.xy.varheight.Activator Bundle-Vendor: RAYTHEON Eclipse-RegisterBuddy: com.raytheon.viz.core, com.raytheon.uf.viz.core Require-Bundle: org.eclipse.core.runtime, - org.eclipse.ui;bundle-version="3.4.1" + org.eclipse.ui;bundle-version="3.4.1", + org.geotools Bundle-RequiredExecutionEnvironment: JavaSE-1.6 Bundle-ActivationPolicy: lazy Import-Package: com.raytheon.uf.common.dataplugin, diff --git a/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/rsc/VarHeightResource.java b/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/rsc/VarHeightResource.java index 200341ed45..0626f01f39 100644 --- a/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/rsc/VarHeightResource.java +++ b/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/rsc/VarHeightResource.java @@ -78,6 +78,7 @@ import com.vividsolutions.jts.geom.Geometry; * ------------ ---------- ----------- -------------------------- * Nov 23, 2009 mschenke Initial creation * Feb 10, 2011 8344 bkowal enabled the magnification capability. + * Dec 19, 2013 DR 16795 D. 
Friedman Transform pixel coordinate in inspect * * * @@ -543,9 +544,13 @@ public class VarHeightResource extends @Override public String inspect(ReferencedCoordinate coord) throws VizException { Coordinate object = coord.getObject(); - object = descriptor.getGraphCoordiante(this, object); - if (object != null) { - return object.x + ", " + object.y; + double[] worldCoord = descriptor.pixelToWorld( + new double[] { object.x, object.y }); + Coordinate c = new Coordinate(worldCoord[0], worldCoord[1]); + + c = descriptor.getGraphCoordiante(this, c); + if (c != null) { + return c.x + ", " + c.y; } return null; } diff --git a/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/util/VarHeightZoomHandler.java b/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/util/VarHeightZoomHandler.java index d097c2d50b..03da2b34d2 100644 --- a/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/util/VarHeightZoomHandler.java +++ b/cave/com.raytheon.uf.viz.xy.varheight/src/com/raytheon/uf/viz/xy/varheight/util/VarHeightZoomHandler.java @@ -20,7 +20,11 @@ package com.raytheon.uf.viz.xy.varheight.util; import org.eclipse.swt.widgets.Event; +import org.geotools.geometry.DirectPosition2D; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.viz.core.IDisplayPaneContainer; import com.raytheon.uf.viz.core.drawables.IRenderableDisplay; import com.raytheon.uf.viz.core.drawables.ResourcePair; @@ -44,6 +48,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jul 3, 2010 bsteffen Initial creation + * Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate for zoom * * * @@ -52,6 +57,9 @@ import com.vividsolutions.jts.geom.Coordinate; */ public class VarHeightZoomHandler extends AbstractGraphInputHandler { + private static final transient IUFStatusHandler statusHandler = UFStatus + .getHandler(VarHeightZoomHandler.class); + private MousePreferenceManager prefManager = MousePreferenceManager .getInstance(); @@ -113,12 +121,24 @@ public class VarHeightZoomHandler extends AbstractGraphInputHandler { && zoomIndex < ZoomMenuAction.ZOOM_LEVELS.length - 1) { zoomIndex += 1; } + + /* Convert from the overall display coordinate space to the coordinate + * space for our resource. 
+ */ + DirectPosition2D dp = new DirectPosition2D(grid.x, grid.y); + try { + desc.getGridGeometry().getGridToCRS().transform(dp, dp); + } catch (Exception e) { + statusHandler.handle(Priority.PROBLEM, + "Error converting coordinate for zoom", e); + } + for (ResourcePair rsc : desc.getResourceList()) { if (rsc.getResource() instanceof IGraphableResource) { IGraph graph = desc.getGraph((IGraphableResource) rsc .getResource()); - if (graph.getExtent().contains(new double[] { grid.x, grid.y })) { - graph.zoom((int) Math.pow(2, zoomIndex), grid); + if (graph.getExtent().contains(new double[] { dp.x, dp.y })) { + graph.zoom((int) Math.pow(2, zoomIndex), new Coordinate(dp.x, dp.y)); } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/Activator.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/Activator.java index 3416f2bdc0..d2c70bbc84 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/Activator.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/Activator.java @@ -29,8 +29,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.viz.gfe.dialogs.GFEConfigDialog; -import com.raytheon.viz.gfe.procedures.ProcedureJob; -import com.raytheon.viz.gfe.smarttool.script.SmartToolJob; /** * The activator class controls the plug-in life cycle @@ -43,6 +41,8 @@ import com.raytheon.viz.gfe.smarttool.script.SmartToolJob; * ------------ ---------- ----------- -------------------------- * Initial creation * Oct 30, 2012 1298 rferrel Must be a blocking dialog. + * Dec 09, 2013 #2367 dgilling Remove shutdown of ProcedureJob and + * SmartToolJob. * * * @@ -92,8 +92,6 @@ public class Activator extends AbstractUIPlugin implements BundleActivator { @Override public void stop(BundleContext context) throws Exception { plugin = null; - ProcedureJob.shutdown(); - SmartToolJob.shutdown(); super.stop(context); } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/GridManagerView.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/GridManagerView.java index 1b08f40d78..746cedf8c6 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/GridManagerView.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/GridManagerView.java @@ -38,8 +38,6 @@ import com.raytheon.viz.gfe.core.parm.Parm; import com.raytheon.viz.gfe.dialogs.KillJobsOnExitDialog; import com.raytheon.viz.gfe.dialogs.SaveParameterDialog; import com.raytheon.viz.gfe.gridmanager.GridManager; -import com.raytheon.viz.gfe.procedures.ProcedureJob; -import com.raytheon.viz.gfe.smarttool.script.SmartToolJob; import com.raytheon.viz.ui.DetachedViewListener; import com.raytheon.viz.ui.color.BackgroundColor; import com.raytheon.viz.ui.color.IBackgroundColorChangedListener.BGColorMode; @@ -56,6 +54,7 @@ import com.raytheon.viz.ui.color.IBackgroundColorChangedListener.BGColorMode; * adding cancel capability and if error on * save then the close is cancelled. * 10/30/2012 #1298 rferrel Must keep blocking dialogs to work with eclipse plugins. + * 12/10/2013 #2367 dgilling Use new ProcedureJobePool and SmartToolJobPool. * * * @author dfitch @@ -138,11 +137,12 @@ public class GridManagerView extends ViewPart implements ISaveablePart2 { @Override public int promptToSaveOnClose() { // Check for any running/queued jobs. 
- if (ProcedureJob.haveJobs() || SmartToolJob.haveJobs()) { + if (dataManager.getProcedureJobPool().isActive() + || dataManager.getSmartToolJobPool().isActive()) { Shell shell = PlatformUI.getWorkbench().getActiveWorkbenchWindow() .getShell(); - - KillJobsOnExitDialog dialog = new KillJobsOnExitDialog(shell); + KillJobsOnExitDialog dialog = new KillJobsOnExitDialog(shell, + dataManager); // Must keep modal and blocking in order to work with eclipse // plugins. dialog.setBlockOnOpen(true); @@ -187,13 +187,10 @@ public class GridManagerView extends ViewPart implements ISaveablePart2 { @Override public boolean isDirty() { - if ((dataManager != null && dataManager.getParmManager() - .getModifiedParms().length > 0) - || SmartToolJob.haveJobs() - || ProcedureJob.haveJobs()) { - return true; - } - return false; + return ((dataManager != null) && (dataManager.getParmManager() + .getModifiedParms().length > 0)) + || dataManager.getProcedureJobPool().isActive() + || dataManager.getSmartToolJobPool().isActive(); } @Override diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/actions/RunProcedureAction.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/actions/RunProcedureAction.java index 1f0bd1b4e7..7839a08908 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/actions/RunProcedureAction.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/actions/RunProcedureAction.java @@ -32,7 +32,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.viz.gfe.core.DataManager; -import com.raytheon.viz.gfe.procedures.ProcedureJob; +import com.raytheon.viz.gfe.core.DataManagerUIFactory; import com.raytheon.viz.gfe.procedures.ProcedureRequest; import com.raytheon.viz.gfe.procedures.ProcedureSelectionDlg; import com.raytheon.viz.gfe.procedures.ProcedureUtil; @@ -47,8 +47,9 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Nov 4, 2008 njensen Initial creation - * Nov 15, 2012 1298 rferrel Changes for non-blocking ProcedureSelectionDlg. + * Nov 04, 2008 njensen Initial creation + * Nov 15, 2012 #1298 rferrel Changes for non-blocking ProcedureSelectionDlg. + * Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool. 
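The pool API this handler now relies on (schedule here, plus the isActive and cancel calls used elsewhere in this patch) reduces to a fixed set of worker threads draining a shared request queue. A bare-bones sketch of that shape follows; the actual ProcedureJobPool and SmartToolJobPool are built on Eclipse Jobs and also track in-flight requests, so treat this as an outline, not the implementation:

    import java.util.concurrent.LinkedBlockingQueue;

    class RequestPool {
        private final LinkedBlockingQueue<Runnable> queue =
                new LinkedBlockingQueue<Runnable>();

        private final Thread[] workers;

        RequestPool(int poolSize) {
            workers = new Thread[poolSize];
            for (int i = 0; i < poolSize; i++) {
                workers[i] = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            while (true) {
                                queue.take().run(); // block until a request arrives
                            }
                        } catch (InterruptedException e) {
                            // cancel() interrupts the worker to shut it down
                        }
                    }
                }, "request-pool-" + i);
                workers[i].start();
            }
        }

        void schedule(Runnable request) {
            queue.add(request);
        }

        boolean isActive() {
            // The real pools also count requests currently being executed.
            return !queue.isEmpty();
        }

        void cancel() {
            queue.clear();
            for (Thread worker : workers) {
                worker.interrupt();
            }
        }
    }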
* * * @author njensen @@ -69,11 +70,11 @@ public class RunProcedureAction extends AbstractHandler { @Override public Object execute(ExecutionEvent event) throws ExecutionException { String procedureName = event.getParameter("name"); - DataManager dm = DataManager.getCurrentInstance(); + DataManager dm = DataManagerUIFactory.getCurrentInstance(); try { List varList = dm.getProcedureInterface() .getVarDictWidgets(procedureName); - if (varList == null || varList.size() == 0) { + if (varList == null || varList.isEmpty()) { // no VariableList found on procedure, just run it PreviewInfo pi = ProcedureUtil.checkAndBuildPreview(dm, procedureName); @@ -81,7 +82,7 @@ public class RunProcedureAction extends AbstractHandler { ProcedureRequest req = ProcedureUtil.buildProcedureRequest( procedureName, dm); if (req != null) { - ProcedureJob.enqueue(dm, req); + dm.getProcedureJobPool().schedule(req); } } } else { diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/DataManager.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/DataManager.java index 721d9188a1..1aa76a05b8 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/DataManager.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/core/DataManager.java @@ -69,10 +69,12 @@ import com.raytheon.viz.gfe.itool.IToolController; import com.raytheon.viz.gfe.itool.IToolFactory; import com.raytheon.viz.gfe.jobs.AutoSaveJob; import com.raytheon.viz.gfe.procedures.ProcedureFactory; +import com.raytheon.viz.gfe.procedures.ProcedureJobPool; import com.raytheon.viz.gfe.procedures.ProcedureUIController; import com.raytheon.viz.gfe.smarttool.EditActionProcessor; import com.raytheon.viz.gfe.smarttool.GridCycler; import com.raytheon.viz.gfe.smarttool.script.SmartToolFactory; +import com.raytheon.viz.gfe.smarttool.script.SmartToolJobPool; import com.raytheon.viz.gfe.smarttool.script.SmartToolUIController; import com.raytheon.viz.gfe.textformatter.TextProductManager; @@ -97,6 +99,7 @@ import com.raytheon.viz.gfe.textformatter.TextProductManager; * 04/24/2013 1936 dgilling Move initialization of TextProductMgr * to GFE startup. * 08/27/2013 2302 randerso Code cleanup for AutoSaveJob + * 12/09/2013 2367 dgilling Instantiate ProcedureJobPool here. * * * @@ -195,6 +198,10 @@ public class DataManager { private List allSites; + private final ProcedureJobPool procJobPool; + + private final SmartToolJobPool toolJobPool; + public IISCDataAccess getIscDataAccess() { return iscDataAccess; } @@ -228,6 +235,8 @@ public class DataManager { strInitJob.schedule(); initializeScriptControllers(); + procJobPool = new ProcedureJobPool(4, 4, this); + toolJobPool = new SmartToolJobPool(3, 3, this); this.weGroupManager = new WEGroupManager(this); this.editActionProcessor = new EditActionProcessor(this); @@ -297,6 +306,28 @@ public class DataManager { procedureInterface.dispose(); } + // by moving the the pools' cancel calls to another thread, we prevent + // GFE shutdown from freezing the UI thread until all jobs have + // completed. The unfortunate side effect is that we get that annoying + // "Job found still running after platform shutdown" warning from + // Eclipse. 
+ Runnable killJobPools = new Runnable() { + + @Override + public void run() { + if (toolJobPool != null) { + toolJobPool.cancel(); + } + + if (procJobPool != null) { + procJobPool.cancel(); + } + } + }; + Thread killPoolsThread = new Thread(killJobPools, "shutdown-gfe-pools"); + killPoolsThread.setDaemon(false); + killPoolsThread.start(); + NotificationManagerJob.removeObserver("edex.alerts.gfe", router); } @@ -689,4 +720,11 @@ public class DataManager { return textProductMgr; } + public ProcedureJobPool getProcedureJobPool() { + return procJobPool; + } + + public SmartToolJobPool getSmartToolJobPool() { + return toolJobPool; + } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/KillJobsOnExitDialog.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/KillJobsOnExitDialog.java index c1808c867c..5a809bb65e 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/KillJobsOnExitDialog.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/dialogs/KillJobsOnExitDialog.java @@ -30,8 +30,7 @@ import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; -import com.raytheon.viz.gfe.procedures.ProcedureJob; -import com.raytheon.viz.gfe.smarttool.script.SmartToolJob; +import com.raytheon.viz.gfe.core.DataManager; import com.raytheon.viz.ui.dialogs.CaveJFACEDialog; /** @@ -44,6 +43,8 @@ import com.raytheon.viz.ui.dialogs.CaveJFACEDialog; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jun 13, 2011 rferrel Initial creation + * Dec 10, 2013 #2367 dgilling Rewrite to use new ProcedureJobPool and + * SmartToolJobPool. * * * @@ -54,13 +55,16 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog { private Composite top; + private final DataManager dataMgr; + /** * Use defaults of -240, minimum and 240 max. 
*/ - public KillJobsOnExitDialog(Shell parent) { + public KillJobsOnExitDialog(Shell parent, DataManager dataMgr) { super(parent); int style = this.getShellStyle() | SWT.MODELESS | SWT.TITLE | SWT.CLOSE; this.setShellStyle(style); + this.dataMgr = dataMgr; } @Override @@ -77,9 +81,9 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog { private void initializeComponents() { - int cnt[] = ProcedureJob.getJobCount(); + int cnt[] = dataMgr.getProcedureJobPool().getWorkRemaining(); GridData data = null; - if (cnt[0] > 0 || cnt[1] > 0) { + if ((cnt[0] > 0) || (cnt[1] > 0)) { Label lab = new Label(top, SWT.NONE); lab.setText(String .format("Have %d procedure(s) running and %d procedures(s) pending", @@ -88,8 +92,8 @@ public class KillJobsOnExitDialog extends CaveJFACEDialog { lab.setLayoutData(data); } - cnt = SmartToolJob.getJobCount(); - if (cnt[0] > 0 || cnt[1] > 0) { + cnt = dataMgr.getSmartToolJobPool().getWorkRemaining(); + if ((cnt[0] > 0) || (cnt[1] > 0)) { Label lab = new Label(top, SWT.NONE); lab.setText(String .format("Have %d Smart tool(s) running and %d Smart tool(s) pending", diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/perspective/GFEPerspectiveManager.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/perspective/GFEPerspectiveManager.java index 9902f4f472..1fd339dd28 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/perspective/GFEPerspectiveManager.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/perspective/GFEPerspectiveManager.java @@ -60,9 +60,7 @@ import com.raytheon.viz.gfe.core.DataManagerUIFactory; import com.raytheon.viz.gfe.core.GFEMapRenderableDisplay; import com.raytheon.viz.gfe.core.ISpatialDisplayManager; import com.raytheon.viz.gfe.core.internal.GFESpatialDisplayManager; -import com.raytheon.viz.gfe.procedures.ProcedureJob; import com.raytheon.viz.gfe.rsc.GFELegendResourceData; -import com.raytheon.viz.gfe.smarttool.script.SmartToolJob; import com.raytheon.viz.gfe.statusline.ISCSendEnable; import com.raytheon.viz.ui.EditorUtil; import com.raytheon.viz.ui.cmenu.ZoomMenuAction; @@ -88,6 +86,8 @@ import com.raytheon.viz.ui.perspectives.VizPerspectiveListener; * Jul 7, 2011 #9897 ryu close formatters on perspective close/reset * Aug 20,2012 #1077 randerso Added support for bgColor setting * Oct 23, 2012 #1287 rferrel Changes for non-blocking FormattrLauncherDialog. + * Dec 09, 2013 #2367 dgilling Remove shutdown of ProcedureJob and + * SmartToolJob. * * * @author randerso @@ -235,15 +235,6 @@ public class GFEPerspectiveManager extends AbstractCAVEPerspectiveManager { DataManagerUIFactory.dispose(perspectiveWindow); - // Put on own thread so close is not slowed down. - new Thread(new Runnable() { - - @Override - public void run() { - ProcedureJob.shutdown(); - SmartToolJob.shutdown(); - } - }).start(); FormatterlauncherAction.closeDialog(); } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJob.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJob.java deleted file mode 100644 index 21c8f4523e..0000000000 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJob.java +++ /dev/null @@ -1,449 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. 
Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.viz.gfe.procedures; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import jep.JepException; - -import org.eclipse.core.runtime.IProgressMonitor; -import org.eclipse.core.runtime.IStatus; -import org.eclipse.core.runtime.Status; -import org.eclipse.core.runtime.jobs.Job; - -import com.raytheon.uf.common.dataplugin.gfe.StatusConstants; -import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData; -import com.raytheon.uf.common.status.IUFStatusHandler; -import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.common.time.TimeRange; -import com.raytheon.uf.viz.core.jobs.AbstractQueueJob; -import com.raytheon.viz.gfe.Activator; -import com.raytheon.viz.gfe.GFEException; -import com.raytheon.viz.gfe.core.DataManager; -import com.raytheon.viz.gfe.jobs.AsyncProgressJob; - -/** - * Job for running GFE procedures. Since JEP/JNI requires that the thread that - * initialized the python interpreter is the same one that runs it, this job - * initializes an interpreter for procedures and then sleeps until a request is - * enqueued. - * - *
- * 
- * SOFTWARE HISTORY
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Oct 8, 2009             njensen     Initial creation
- * Jan 8, 2013  1486       dgilling    Support changes to BaseGfePyController.
- * Jan 18, 2013 1509       njensen     Garbage collect after running procedure
- * Apr 03, 2013    1855  njensen   Never dispose interpreters until shutdown and
- *                                                      reuse interpreter if called from procedure
- * 
- * 
- * - * @author njensen - * @version 1.0 - */ - -public class ProcedureJob extends AbstractQueueJob { - /** - * Maximum number of jobs to keep for a given Data Manager. - */ - private final static int maxJobs = 4; - - /** - * Index of job with the queue. Will break code if not zero. - */ - private final static int QUEUE_JOB_INDEX = 0; - - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(ProcedureJob.class); - - private static Map> instanceMap = null; - - private ProcedureController python; - - private DataManager dataMgr; - - private ProcedureRequest request; - - protected ProcedureJob(DataManager dataMgr) { - super("GFE Procedures Job"); - this.dataMgr = dataMgr; - } - - private void getRequest() throws InterruptedException { - if (instanceMap == null) { - request = null; - return; - } - - List jobList = instanceMap.get(dataMgr); - if (jobList == null || jobList.size() == 0 - || jobList.get(QUEUE_JOB_INDEX).queue == null) { - request = null; - } else { - request = jobList.get(QUEUE_JOB_INDEX).queue.poll(1000L, - TimeUnit.MILLISECONDS); - } - } - - /* - * (non-Javadoc) - * - * @seeorg.eclipse.core.runtime.jobs.Job#run(org.eclipse.core.runtime. - * IProgressMonitor) - */ - @Override - protected IStatus run(IProgressMonitor monitor) { - try { - python = ProcedureFactory.buildController(dataMgr); - } catch (JepException e) { - ProcedureJob.removeJob(dataMgr, this); - return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID, - "Error initializing procedure python", e); - } - - try { - while (monitor.isCanceled() == false) { - // ProcedureRequest request; - try { - getRequest(); - } catch (InterruptedException e) { - continue; - } - // May have been canceled while waiting. - if (monitor.isCanceled()) { - break; - } - synchronized (this) { - try { - if (request != null) { - python.processFileUpdates(); - processRequest(request); - if (request != null) { - request.requestComplete(null); - } - } - } catch (Throwable t) { - statusHandler.handle(Priority.PROBLEM, - "Error running procedure ", t); - if (request != null) { - request.requestComplete(t); - } - } finally { - request = null; - } - } - } - } finally { - if (python != null) { - python.dispose(); - python = null; - } - } - - return Status.OK_STATUS; - } - - /** - * Remove a job from the Data Manger's job list. - * - * @param dataMgr - * - The job's data manager - * @param job - * - The job to remove - */ - private static synchronized void removeJob(DataManager dataMgr, - ProcedureJob job) { - if (instanceMap == null) { - return; - } - - List jobList = instanceMap.get(dataMgr); - - if (jobList != null) { - jobList.remove(job); - - // Removing job with queue remove job list so next request will set - // up new queue. - if (job.queue != null) { - jobList.clear(); - instanceMap.remove(dataMgr); - } - } - } - - public void processRequest(ProcedureRequest request) { - this.execute(python, request.getProcedureName(), request.getRefSet(), - request.getTimeRange(), request.getVarDict()); - this.dataMgr.getEditActionProcessor().wrapUpExecute( - request.getPreview(), false); - } - - /** - * This manages the scheduling of jobs to service a Data Manger's requests. 
- * - * @param dataMgr - * - Data Manger for the request - * @param request - * - The request to service - * @return state - true when job available to process request otherwise - * false and request is queued to wait for next available job - */ - public static synchronized boolean enqueue(DataManager dataMgr, - ProcedureRequest request) { - if (instanceMap == null) { - instanceMap = new HashMap>(); - } - - Thread currentThread = Thread.currentThread(); - List jobList = instanceMap.get(dataMgr); - if (jobList == null) { - jobList = new ArrayList(); - // Add the first job which contains the queue used by all jobs in - // the list. - ProcedureJob job = new ProcedureJob(dataMgr); - jobList.add(job); - instanceMap.put(dataMgr, jobList); - job.setSystem(true); - job.schedule(); - } - boolean jobAvailable = false; - ProcedureJob alreadyOnThread = null; - for (ProcedureJob job : jobList) { - Thread jobThread = job.getThread(); - if (currentThread == jobThread) { - // this occurs when a running procedure uses - // SmartScript.callProcedure() - // for efficiency we want to just stay on this thread - alreadyOnThread = job; - jobAvailable = true; - break; - } else if (job.request == null) { - jobAvailable = true; - break; - } - } - - // All jobs for data manager are busy, add another if we haven't - // reached the limit. - if (alreadyOnThread == null && !jobAvailable - && jobList.size() < maxJobs) { - ProcedureJob job = new ProcedureJob(dataMgr); - job.setSystem(true); - jobList.add(job); - // Never used additional job's queue - job.queue = null; - job.schedule(); - jobAvailable = true; - } - - if (alreadyOnThread != null) { - try { - alreadyOnThread.processRequest(request); - request.requestComplete(null); - } catch (Throwable t) { - statusHandler.handle(Priority.PROBLEM, - "Error running procedure ", t); - request.requestComplete(t); - } - } else { - jobList.get(QUEUE_JOB_INDEX).enqueue(request); - } - return jobAvailable; - } - - /** - * This returns an array of two integers the first is the number of - * Procedure Tool Jobs being processed and the second is the number in the - * queue waiting to be processed. - * - * @return cnts - */ - public static int[] getJobCount() { - int[] cnt = new int[] { 0, 0 }; - if (instanceMap != null) { - for (List jobList : instanceMap.values()) { - cnt[1] += jobList.get(QUEUE_JOB_INDEX).queue.size(); - for (ProcedureJob job : jobList) { - if (job.request != null) { - ++cnt[0]; - } - } - } - } - return cnt; - } - - /** - * Determine if there are any Procedure Tool Jobs queued and/or being - * processed. - * - * @return true when there are job(s)s queued or being processed otherwise - * false - */ - public static boolean haveJobs() { - boolean result = false; - - if (instanceMap != null) { - for (List jobList : instanceMap.values()) { - // Any pending requests. - if (jobList.get(QUEUE_JOB_INDEX).queue.size() > 0) { - result = true; - break; - } - - // Any requests being processed. - for (ProcedureJob job : jobList) { - if (job.request != null) { - result = true; - break; - } - } - } - } - return result; - } - - /** - * This terminates all the Data Managers' jobs. - */ - public static synchronized void shutdown() { - // TODO This currently joins with a job waiting for it to finish which - // can take a long time and may even be waiting for user to input. Must - // find a wait to kill any GUI associated with a request and if python - // running a way to terminate it so no waiting is involved. 
- if (instanceMap != null) { - for (List jobList : instanceMap.values()) { - jobList.get(QUEUE_JOB_INDEX).queue.clear(); - - // Do in reverse order so last job cancel is the one with the - // queue. - for (int index = jobList.size() - 1; index >= 0; --index) { - jobList.get(index).cancel(); - } - } - - for (List jobList : instanceMap.values()) { - for (ProcedureJob job : jobList) { - synchronized (job) { - try { - if (job.getState() != Job.NONE) { - job.join(); - } - } catch (InterruptedException ex) { - System.err.println("here SmartToolJob"); - } - } - } - } - - for (List jobList : instanceMap.values()) { - jobList.clear(); - } - - instanceMap.clear(); - instanceMap = null; - } - } - - /** - * Executes a procedure - * - * @param procedureName - * the name of the procedure - * @param refSet - * the edit area to run the procedure against - * @param timeRange - * the time range to run the procedure against - * @param varDict - * the cached varDict for the procedure, or null if there is none - * (should be null unless called from within another procedure) - */ - private void execute(ProcedureController controller, String procedureName, - ReferenceData refSet, TimeRange timeRange, String varDict) { - - Job progressJob = new AsyncProgressJob(procedureName, this); - IStatus pjStatus = Status.CANCEL_STATUS; - try { - List argNames = controller.getMethodArguments( - procedureName, "execute"); - Map argMap = getArgValues(argNames, refSet, - timeRange); - controller.setVarDict(varDict); - progressJob.schedule(); - controller.executeProcedure(procedureName, argMap); - pjStatus = Status.OK_STATUS; - } catch (Exception e) { - pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID, - "Error in procedure " + procedureName, e); - statusHandler.handle(Priority.PROBLEM, "Error executing procedure " - + procedureName, e); - } catch (JepException e) { - pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID, - "Error in procedure " + procedureName, e); - statusHandler.handle(Priority.PROBLEM, "Error executing procedure " - + procedureName, e); - } finally { - controller.garbageCollect(); - progressJob.done(pjStatus); - } - } - - /** - * Maps a procedure's execute's argument name to an object - * - * @param args - * the name of the objects - * @param refSet - * the edit area to run the procedure on - * @param timeRange - * the time range to run the procedure on - * @return a map of argument names to objects - * @throws GFEException - */ - private Map getArgValues(List args, - ReferenceData refSet, TimeRange timeRange) throws GFEException { - Map argValueMap = new HashMap(); - // For each argument in args, append a value to the argValueList - for (String arg : args) { - if (arg.equals("varDict")) { - argValueMap.put("varDict", null); - } else if (arg.equals("editArea")) { - argValueMap.put("editArea", refSet); - } else if (arg.equals("timeRange")) { - argValueMap.put("timeRange", timeRange); - } else if (arg.equals("self")) { - // skip - } else { - throw new GFEException("Unknown argument " + arg); - } - - } - return argValueMap; - } - -} diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJobPool.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJobPool.java new file mode 100644 index 0000000000..0f6016dc80 --- /dev/null +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureJobPool.java @@ -0,0 +1,432 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract 
DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.viz.gfe.procedures; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import jep.JepException; + +import org.eclipse.core.runtime.IProgressMonitor; +import org.eclipse.core.runtime.IStatus; +import org.eclipse.core.runtime.Status; +import org.eclipse.core.runtime.jobs.Job; + +import com.raytheon.uf.common.dataplugin.gfe.StatusConstants; +import com.raytheon.uf.common.dataplugin.gfe.reference.ReferenceData; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.common.status.UFStatus.Priority; +import com.raytheon.uf.common.time.TimeRange; +import com.raytheon.uf.common.time.util.TimeUtil; +import com.raytheon.viz.gfe.Activator; +import com.raytheon.viz.gfe.GFEException; +import com.raytheon.viz.gfe.core.DataManager; +import com.raytheon.viz.gfe.jobs.AsyncProgressJob; + +/** + * Job pool for running GFE procedures. Since JEP/JNI requires that the thread + * that initialized the python interpreter is the same one that runs it, this + * pool initializes an interpreter for procedures and then sleeps until a + * request is enqueued. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 09, 2013  #2367    dgilling     Initial creation
+ * 
+ * 
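A hedged sketch of the lifecycle this new pool supports, using only the constructor and methods added in this file; the core and max pool sizes are illustrative assumptions, and the patch itself hands the pool out through DataManager rather than constructing it at call sites:

    // Sketch only: the sizes here are illustrative, not the patch's wiring.
    ProcedureJobPool pool = new ProcedureJobPool(1, 4, dataMgr);

    // The first idle job works the request off; a request made from a
    // thread already in the pool (SmartScript.callProcedure()) is run
    // inline on that thread instead of being queued.
    pool.schedule(request);

    // Shutdown: further schedule() calls are ignored, the work queue is
    // cleared, the jobs are canceled, and cancel(true) joins on them.
    pool.cancel(true);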
+ * + * @author dgilling + * @version 1.0 + */ + +public class ProcedureJobPool { + + protected LinkedBlockingQueue workQueue = new LinkedBlockingQueue(); + + protected LinkedBlockingQueue jobQueue = new LinkedBlockingQueue(); + + protected List jobList; + + protected boolean cancel = false; + + protected Object cancelLock = new Object(); + + protected Object joinLock = new Object(); + + private final DataManager dataMgr; + + private final int poolMaxSize; + + /** + * Creates a new ProcedureJobPool with the specified size parameters. + * + * @param corePoolSize + * The minimum size of the job pool--will always have at least + * this many Jobs ready to execute. + * @param poolMaxSize + * The maximum size of the job pool. + * @param dataMgr + * DataManager instance. + */ + public ProcedureJobPool(int corePoolSize, int poolMaxSize, + DataManager dataMgr) { + this.dataMgr = dataMgr; + this.poolMaxSize = poolMaxSize; + for (int i = 0; i < corePoolSize; i++) { + Job job = new ProcedureJob(this.dataMgr); + jobQueue.add(job); + } + this.jobList = new CopyOnWriteArrayList(); + } + + /** + * Enqueue the specified request into the job pool's request queue. Will be + * worked by first available job. If calling from an existing thread in the + * job pool, that thread will be reused to execute the request. + * + * @param request + * ProcedureRequest containing information on procedure to + * execute. + */ + public void schedule(ProcedureRequest request) { + ProcedureJob reuseJob = null; + + // do not schedule while canceling(cancel should be fast). + synchronized (cancelLock) { + if (cancel) { + return; + } + // do not schedule while joining, join might be slow but the javaDoc + // warns others. + synchronized (joinLock) { + boolean jobAvailable = false; + Thread currentThread = Thread.currentThread(); + for (Job job : jobList) { + Thread jobThread = job.getThread(); + ProcedureJob procJob = (ProcedureJob) job; + if (currentThread == jobThread) { + // this occurs when a running procedure uses + // SmartScript.callProcedure() + // for efficiency we want to just stay on this thread + reuseJob = procJob; + jobAvailable = true; + break; + } else if (!procJob.isRunning()) { + jobAvailable = true; + } + } + + if (reuseJob == null) { + if (!jobAvailable) { + Job job = jobQueue.poll(); + if ((job == null) && (jobList.size() < poolMaxSize)) { + job = new ProcedureJob(dataMgr); + } + if (job != null) { + job.schedule(); + jobList.add(job); + } + } + workQueue.offer(request); + } + } + } + + if (reuseJob != null) { + reuseJob.processRequest(request); + } + } + + /** + * Join on the Jobs in the pool. Attempting to schedule other Jobs will + * block until join has returned so be careful when calling + */ + public void join() { + synchronized (joinLock) { + for (Job j : jobList) { + try { + j.join(); + } catch (InterruptedException e) { + // Ignore interupt + } + } + } + } + + /** + * Cancel the job pool, will clear out the workQueue then join on all jobs + * running. Once canceled all future calls to schedule will be ignored. + */ + public void cancel() { + cancel(true); + } + + /** + * Cancel the job pool, will clear out the workQueue and optionally join + * running jobs. Once canceled all future calls to schedule will be ignored. + * + * @param join + * true if you want to join before returning. 
+ */ + public void cancel(boolean join) { + synchronized (cancelLock) { + cancel = true; + workQueue.clear(); + for (Job j : jobList) { + j.cancel(); + } + } + if (join) { + join(); + } + } + + /** + * Cancels the specified request. Returns true if the provided request was + * waiting to be run but now is not. Returns false if the provided request + * is already running or if it was not enqueued to begin with. + * + * @param request + * The request to cancel. + * @return True, if the request was in the queue. False, if it was already + * being worked by the pool or if it was not in the queue. + */ + public boolean cancel(ProcedureRequest request) { + return workQueue.remove(request); + } + + /** + * A job pool is considered active if any of the jobs it contains are + * servicing a request or there is still requests to be worked off in the + * queue. + * + * @return If any jobs are working off a request or there are requests still + * in the work queue. + */ + public boolean isActive() { + if (!workQueue.isEmpty()) { + return true; + } + for (Job job : jobList) { + ProcedureJob procJob = (ProcedureJob) job; + if (procJob.isRunning()) { + return true; + } + } + return false; + } + + /** + * Get the number requests remaining in the queue and the number of jobs in + * the pool currently working off a request. + * + * @return The number requests remaining in the queue and the number of jobs + * in the pool currently working off a request. + */ + public int[] getWorkRemaining() { + int jobsRunning = 0; + for (Job job : jobList) { + ProcedureJob procJob = (ProcedureJob) job; + if (procJob.isRunning()) { + jobsRunning++; + } + } + + return new int[] { jobsRunning, workQueue.size() }; + } + + protected class ProcedureJob extends Job { + + private final IUFStatusHandler statusHandler = UFStatus + .getHandler(ProcedureJob.class); + + private ProcedureController python; + + private final DataManager dataMgr; + + private volatile boolean running; + + public ProcedureJob(DataManager dataMgr) { + super("GFE Procedures Job"); + this.dataMgr = dataMgr; + this.running = false; + setSystem(true); + } + + @Override + protected IStatus run(IProgressMonitor monitor) { + try { + python = ProcedureFactory.buildController(dataMgr); + } catch (JepException e) { + jobList.remove(this); + statusHandler.error("Error initializing procedure python", e); + return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID, + "Error initializing procedure python", e); + } + + IStatus statusCode = Status.OK_STATUS; + try { + while (!monitor.isCanceled()) { + try { + ProcedureRequest request = null; + try { + request = workQueue.poll( + TimeUtil.MILLIS_PER_SECOND, + TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + statusCode = Status.CANCEL_STATUS; + break; + } + + if (monitor.isCanceled()) { + statusCode = Status.CANCEL_STATUS; + break; + } + + if (request != null) { + running = true; + + python.processFileUpdates(); + if (monitor.isCanceled()) { + statusCode = Status.CANCEL_STATUS; + break; + } + + processRequest(request); + running = false; + } + } catch (Throwable t) { + statusHandler.error( + "Unhandled exception in ProcedureJob.", t); + } + } + } finally { + if (python != null) { + python.dispose(); + python = null; + } + } + + return statusCode; + } + + protected void processRequest(ProcedureRequest request) { + Object retVal = null; + try { + execute(python, request); + retVal = null; + } catch (Throwable t) { + statusHandler + .handle(Priority.PROBLEM, "Error running procedure " + + 
request.getProcedureName(), t); + retVal = t; + } finally { + dataMgr.getEditActionProcessor().wrapUpExecute( + request.getPreview(), false); + request.requestComplete(retVal); + } + } + + /** + * Executes a procedure + * + * @param procedureName + * the name of the procedure + * @param request + * the request containing data on the procedure to run. + * @throws Exception + * @throws JepException + */ + private void execute(ProcedureController controller, + ProcedureRequest request) throws Exception, JepException { + String procedureName = request.getProcedureName(); + Job progressJob = new AsyncProgressJob(procedureName, this); + IStatus pjStatus = Status.CANCEL_STATUS; + progressJob.schedule(); + + try { + List argNames = controller.getMethodArguments( + procedureName, "execute"); + Map argMap = getArgValues(argNames, + request.getRefSet(), request.getTimeRange()); + controller.setVarDict(request.getVarDict()); + controller.executeProcedure(procedureName, argMap); + pjStatus = Status.OK_STATUS; + } catch (Exception e) { + pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID, + "Error in procedure " + procedureName, e); + throw e; + } catch (JepException e) { + pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID, + "Error in procedure " + procedureName, e); + throw e; + } finally { + controller.garbageCollect(); + progressJob.done(pjStatus); + } + } + + /** + * Maps a procedure's execute's argument name to an object + * + * @param args + * the name of the objects + * @param refSet + * the edit area to run the procedure on + * @param timeRange + * the time range to run the procedure on + * @return a map of argument names to objects + * @throws GFEException + */ + private Map getArgValues(List args, + ReferenceData refSet, TimeRange timeRange) throws GFEException { + Map argValueMap = new HashMap(); + // For each argument in args, append a value to the argValueList + for (String arg : args) { + if (arg.equals("varDict")) { + argValueMap.put("varDict", null); + } else if (arg.equals("editArea")) { + argValueMap.put("editArea", refSet); + } else if (arg.equals("timeRange")) { + argValueMap.put("timeRange", timeRange); + } else if (arg.equals("self")) { + // skip + } else { + throw new GFEException("Unknown argument " + arg); + } + + } + return argValueMap; + } + + public boolean isRunning() { + return running; + } + } +} diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureSelectionDlg.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureSelectionDlg.java index b5656882db..415b56bd35 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureSelectionDlg.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureSelectionDlg.java @@ -36,7 +36,8 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Feb 9, 2010 njensen Initial creation + * Feb 09, 2010 njensen Initial creation + * Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool. 
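The call-site change this history entry records is mechanical, and the same substitution appears in ProcedureUtil and the smart tool classes later in the patch; before and after, per the hunk that follows:

    // Before: requests went through the static per-DataManager job list.
    ProcedureJob.enqueue(dataMgr, req);

    // After: the DataManager owns the pool.
    dataMgr.getProcedureJobPool().schedule(req);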
* * * @@ -67,8 +68,7 @@ public class ProcedureSelectionDlg extends SelectionDlg { .transformVarDict(getValues()); req.setVarDict(varDict); req.setPreview(pi); - // ProcedureJob.getInstance(dataMgr).enqueue(req); - ProcedureJob.enqueue(dataMgr, req); + dataMgr.getProcedureJobPool().schedule(req); } } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureUtil.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureUtil.java index 0cbf79a5ae..8ace2dd5cd 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureUtil.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/procedures/ProcedureUtil.java @@ -44,8 +44,9 @@ import com.raytheon.viz.gfe.smarttool.PreviewInfo; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Feb 9, 2010 njensen Initial creation - * 4/26/2012 14748 ryu Use edit area and time range from preview info + * Feb 09, 2010 njensen Initial creation + * Apr 26, 2012 14748 ryu Use edit area and time range from preview info + * Dec 09, 2013 #2367 dgilling Use new ProcedureJobPool. * * * @@ -123,7 +124,7 @@ public class ProcedureUtil { }); } - ProcedureJob.enqueue(dm, req); + dm.getProcedureJobPool().schedule(req); return req.getResult(); } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/rsc/GFEResource.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/rsc/GFEResource.java index 028ced353e..6a204d55fc 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/rsc/GFEResource.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/rsc/GFEResource.java @@ -162,6 +162,7 @@ import com.vividsolutions.jts.geom.Envelope; * Nov 08, 2012 1298 rferrel Changes for non-blocking FuzzValueDialog. * Mar 04, 2013 1637 randerso Fix time matching for ISC grids * Aug 27, 2013 2287 randerso Fixed scaling and direction of wind arrows + * Dec 11, 2013 2621 randerso Removed conditional from getParm so it never returns null * * * @@ -341,11 +342,7 @@ public class GFEResource extends * @return Returns the parm associated with the GFE Resource */ public Parm getParm() { - Parm retVal = null; - if (this.getStatus() != ResourceStatus.DISPOSED) { - retVal = this.parm; - } - return retVal; + return this.parm; } /* diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/SmartUtil.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/SmartUtil.java index 3f83503148..16fcc25c37 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/SmartUtil.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/SmartUtil.java @@ -36,14 +36,13 @@ import com.raytheon.viz.gfe.core.DataManager; import com.raytheon.viz.gfe.core.parm.Parm; import com.raytheon.viz.gfe.smartscript.FieldDefinition; import com.raytheon.viz.gfe.smarttool.script.SmartToolBlockingSelectionDlg; -import com.raytheon.viz.gfe.smarttool.script.SmartToolJob; import com.raytheon.viz.gfe.smarttool.script.SmartToolRequest; import com.raytheon.viz.gfe.smarttool.script.SmartToolSelectionDlg; import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg; /** * Utilities for smart tools - * + * *
  * SOFTWARE HISTORY
  * Date         Ticket#    Engineer    Description
@@ -52,9 +51,10 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg;
  * Dec 1,  2009  1426      ryu         Add time range warning
  * Nov 15, 2012 1298       rferrel     Changes for non-blocking procedures.
  * Jun 25, 2013  16065     ryu         Passing outerLevel to smart tool job.
- *
+ * Dec 10, 2013  #2367     dgilling    Use new SmartToolJobPool.
+ * 
  * 
- * + * * @author njensen * @version 1.0 */ @@ -67,7 +67,7 @@ public class SmartUtil { * Checks if LD_PRELOAD is set in the environment. If not, jep may have * issues importing modules. (Note that this presumes LD_PRELOAD was set * correctly to point at the python .so file). - * + * * @return if LD_PRELOAD is set */ public static boolean isLdPreloadSet() { @@ -118,7 +118,7 @@ public class SmartUtil { if (pi != null) { SmartToolRequest req = buildSmartToolRequest(dm, pi, true); if (req != null) { - SmartToolJob.enqueue(dm, req); + dm.getSmartToolJobPool().schedule(req); } } } @@ -145,8 +145,8 @@ public class SmartUtil { timeRange, editArea, emptyEditAreaFlag, MissingDataMode.valueFrom(missingDataMode)); PreviewInfo pi = new PreviewInfo(editAction, passErrors, parm); - final SmartToolRequest req = SmartUtil. - buildSmartToolRequest(dm, pi, false); + final SmartToolRequest req = SmartUtil.buildSmartToolRequest(dm, pi, + false); if (varDict != null) { req.setVarDict(varDict); @@ -195,7 +195,7 @@ public class SmartUtil { }); } - SmartToolJob.enqueue(dm, req); + dm.getSmartToolJobPool().schedule(req); return req.getResult(); } } diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJob.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJob.java deleted file mode 100644 index 0b58e7d93e..0000000000 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJob.java +++ /dev/null @@ -1,378 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.viz.gfe.smarttool.script; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import jep.JepException; - -import org.eclipse.core.runtime.IProgressMonitor; -import org.eclipse.core.runtime.IStatus; -import org.eclipse.core.runtime.Status; -import org.eclipse.core.runtime.jobs.Job; - -import com.raytheon.uf.common.dataplugin.gfe.StatusConstants; -import com.raytheon.uf.common.status.IUFStatusHandler; -import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.viz.core.jobs.AbstractQueueJob; -import com.raytheon.viz.gfe.Activator; -import com.raytheon.viz.gfe.core.DataManager; -import com.raytheon.viz.gfe.jobs.AsyncProgressJob; -import com.raytheon.viz.gfe.smarttool.EditAction; -import com.raytheon.viz.gfe.smarttool.SmartToolException; -import com.raytheon.viz.gfe.smarttool.Tool; - -/** - * Job for running smart tools off the UI thread - * - *
- *
- * SOFTWARE HISTORY
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Jan 19, 2010            njensen     Initial creation
- * Jan 18, 2013 1509       njensen     Garbage collect after running tool
- * Apr 03, 2013 1855       njensen     Never dispose interpreters until shutdown
- * Jun 25, 2013 16065      ryu         Clear undo parms list before tool execution
- *
- * 
- * - * @author njensen - * @version 1.0 - */ - -public class SmartToolJob extends AbstractQueueJob { - - /** - * Maximum number of jobs to keep for a given Data Manager. - */ - private final static int maxJobs = 3; - - /** - * Index of job with the queue. Will break code if not zero. - */ - private final static int QUEUE_JOB_INDEX = 0; - - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(SmartToolJob.class); - - private static Map> instanceMap = null; - - private DataManager dataMgr; - - /** - * The request being processed. - */ - private SmartToolRequest request = null; - - protected SmartToolJob(DataManager dataMgr) { - super("GFE Smart Tool Job"); - this.dataMgr = dataMgr; - } - - private void getRequest() throws InterruptedException { - if (instanceMap == null) { - request = null; - return; - } - - List jobList = instanceMap.get(dataMgr); - if (jobList == null || jobList.size() == 0 - || jobList.get(QUEUE_JOB_INDEX).queue == null) { - request = null; - } else { - request = jobList.get(QUEUE_JOB_INDEX).queue.poll(1000L, - TimeUnit.MILLISECONDS); - } - } - - @Override - protected IStatus run(IProgressMonitor monitor) { - SmartToolController python = null; - try { - python = SmartToolFactory.buildController(dataMgr); - } catch (JepException e) { - SmartToolJob.removeJob(dataMgr, this); - return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID, - "Error initializing smart tool python", e); - } - - try { - // Used req to wrap up request after leaving the synchronized - // region. - SmartToolRequest req = null; - while (monitor.isCanceled() == false) { - try { - getRequest(); - - // May have been canceled while waiting. - if (monitor.isCanceled()) { - break; - } - - synchronized (this) { - if (request != null) { - python.processFileUpdates(); - EditAction ea = request.getPreview() - .getEditAction(); - Job progressJob = new AsyncProgressJob( - ea.getItemName(), this); - progressJob.schedule(); - IStatus pjResult = Status.CANCEL_STATUS; - try { - if (request.getOuterLevel()) { - dataMgr.getParmOp().clearUndoParmList(); - } - Tool tool = new Tool(dataMgr.getParmManager(), - request.getPreview().getParm(), - ea.getItemName(), python); - tool.execute(ea.getItemName(), request - .getPreview().getParm(), - ea.getRefSet(), ea.getTimeRange(), - request.getVarDict(), ea - .getMissingDataMode(), monitor); - request.requestComplete(null); - pjResult = Status.OK_STATUS; - - } catch (SmartToolException e) { - pjResult = new Status(IStatus.WARNING, - Activator.PLUGIN_ID, - "Error in smart tool", e); - throw e; - } finally { - python.garbageCollect(); - progressJob.done(pjResult); - req = request; - request = null; - } - } - } - } catch (InterruptedException e) { - statusHandler.handle(Priority.PROBLEM, - "Smart tool thread interrupted", e); - break; - } catch (SmartToolException e) { - statusHandler.handle(Priority.PROBLEM, - "Error running tool ", e); - if (req != null) { - req.requestComplete(e); - } - } catch (Throwable t) { - statusHandler.handle(Priority.PROBLEM, - "Error running tool ", t); - if (req != null) { - req.requestComplete(t); - } - } finally { - if (req != null && req.getPreview() != null) { - this.dataMgr.getEditActionProcessor().wrapUpExecute( - req.getPreview(), true); - } - req = null; - } - } - } finally { - System.err.println("Shutdown instance of SmartToolJob"); - if (python != null) { - python.dispose(); - python = null; - } - } - - return Status.OK_STATUS; - } - - /** - * Remove a job from the Data Manger's job list. 
- * - * @param dataMgr - * - The job's data manager - * @param job - * - The job to remove - */ - private static synchronized void removeJob(DataManager dataMgr, - SmartToolJob job) { - if (instanceMap == null) { - return; - } - - List jobList = instanceMap.get(dataMgr); - - if (jobList != null) { - jobList.remove(job); - - // Removing job with queue remove job list so next request will set - // up new queue. - if (job.queue != null) { - jobList.clear(); - instanceMap.remove(dataMgr); - } - } - } - - /** - * This manages the scheduling of jobs to service a Data Manger's requests. - * - * @param dataMgr - * - Data Manger for the request - * @param request - * - The request to service - * @return state - true when job available to process request otherwise - * false and request is queued to wait for next available job - */ - public static synchronized boolean enqueue(DataManager dataMgr, - SmartToolRequest request) { - if (instanceMap == null) { - instanceMap = new HashMap>(); - } - - List jobList = instanceMap.get(dataMgr); - if (jobList == null) { - jobList = new ArrayList(); - // Add the first job which contains the queue used by all jobs in - // the list. - SmartToolJob job = new SmartToolJob(dataMgr); - jobList.add(job); - instanceMap.put(dataMgr, jobList); - job.setSystem(true); - job.schedule(); - } - boolean jobAvailable = false; - for (SmartToolJob job : jobList) { - if (job.request == null) { - jobAvailable = true; - break; - } - } - - // All jobs for data manager are busy, add another if we haven't reached - // the limit - if (!jobAvailable && jobList.size() < maxJobs) { - SmartToolJob job = new SmartToolJob(dataMgr); - job.setSystem(true); - jobList.add(job); - // Never used additional job's queue - job.queue = null; - job.schedule(); - jobAvailable = true; - } - - jobList.get(QUEUE_JOB_INDEX).enqueue(request); - return jobAvailable; - } - - /** - * This returns an array of two integers the first is the number of Smart - * Tool Jobs being processed and the second is the number in the queue - * waiting to be processed. - * - * @return cnts - */ - public static int[] getJobCount() { - int[] cnt = new int[] { 0, 0 }; - if (instanceMap != null) { - for (List jobList : instanceMap.values()) { - cnt[1] += jobList.get(QUEUE_JOB_INDEX).queue.size(); - for (SmartToolJob job : jobList) { - if (job.request != null) { - ++cnt[0]; - } - } - } - } - return cnt; - } - - /** - * Determine if there are any Smart Tool Jobs queued and/or being processed. - * - * @return true when there are job(s)s queued or being processed otherwise - * false - */ - public static boolean haveJobs() { - boolean result = false; - - if (instanceMap != null) { - for (List jobList : instanceMap.values()) { - // Any pending requests. - if (jobList.get(QUEUE_JOB_INDEX).queue.size() > 0) { - result = true; - break; - } - - // Any requests being processed. - for (SmartToolJob job : jobList) { - if (job.request != null) { - result = true; - break; - } - } - } - } - return result; - } - - /** - * This terminates all the Data Managers' jobs. - */ - public static synchronized void shutdown() { - // TODO This currently joins with a job waiting for it to finish which - // can take a long time and may even be waiting for user input. Must - // find a wait to kill any GUI associated with a request and if python - // running a way to terminate it so no waiting is involved. 
- if (instanceMap != null) { - for (List jobList : instanceMap.values()) { - jobList.get(QUEUE_JOB_INDEX).queue.clear(); - - // Do in reverse order so last job cancel is the one with the - // queue. - for (int index = jobList.size() - 1; index >= 0; --index) { - jobList.get(index).cancel(); - } - } - - for (List jobList : instanceMap.values()) { - for (SmartToolJob job : jobList) { - synchronized (job) { - try { - if (job.getState() != Job.NONE) { - job.join(); - } - } catch (InterruptedException ex) { - // System.err.println("here SmartToolJob"); - } - } - } - } - - for (List jobList : instanceMap.values()) { - jobList.clear(); - } - - instanceMap.clear(); - instanceMap = null; - } - } -} diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJobPool.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJobPool.java new file mode 100644 index 0000000000..4cbcd3d97c --- /dev/null +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolJobPool.java @@ -0,0 +1,377 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.viz.gfe.smarttool.script; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import jep.JepException; + +import org.eclipse.core.runtime.IProgressMonitor; +import org.eclipse.core.runtime.IStatus; +import org.eclipse.core.runtime.Status; +import org.eclipse.core.runtime.jobs.Job; + +import com.raytheon.uf.common.dataplugin.gfe.StatusConstants; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.common.time.util.TimeUtil; +import com.raytheon.viz.gfe.Activator; +import com.raytheon.viz.gfe.core.DataManager; +import com.raytheon.viz.gfe.jobs.AsyncProgressJob; +import com.raytheon.viz.gfe.smarttool.EditAction; +import com.raytheon.viz.gfe.smarttool.SmartToolException; +import com.raytheon.viz.gfe.smarttool.Tool; + +/** + * Job pool for running smart tools off the UI thread. + * + *
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 09, 2013  #2367    dgilling     Initial creation
+ * 
+ * 
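SmartToolJobPool mirrors the ProcedureJobPool added earlier in this patch, including the worker loop shape both inner Job classes use; a condensed, hedged sketch of that loop, where Request and process() stand in for the real types and the surrounding Job plumbing is elided:

    while (!monitor.isCanceled()) {
        Request request;
        try {
            // Wake at least once a second so cancellation is noticed
            // even while the queue stays empty.
            request = workQueue.poll(TimeUtil.MILLIS_PER_SECOND,
                    TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            break; // interrupt is treated as cancellation
        }
        if (monitor.isCanceled()) {
            break; // may have been canceled while waiting
        }
        if (request != null) {
            running = true; // isRunning()/isActive() key off this flag
            try {
                process(request); // JEP/JNI work must stay on this thread
            } finally {
                running = false;
            }
        }
    }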
+ * + * @author dgilling + * @version 1.0 + */ + +public class SmartToolJobPool { + + protected LinkedBlockingQueue workQueue = new LinkedBlockingQueue(); + + protected LinkedBlockingQueue jobQueue = new LinkedBlockingQueue(); + + protected List jobList; + + protected boolean cancel = false; + + protected Object cancelLock = new Object(); + + protected Object joinLock = new Object(); + + private final DataManager dataMgr; + + private final int poolMaxSize; + + /** + * Creates a new SmartToolJobPool with the specified size parameters. + * + * @param corePoolSize + * The minimum size of the job pool--will always have at least + * this many Jobs ready to execute. + * @param poolMaxSize + * The maximum size of the job pool. + * @param dataMgr + * DataManager instance. + */ + public SmartToolJobPool(int corePoolSize, int poolMaxSize, + DataManager dataMgr) { + this.dataMgr = dataMgr; + this.poolMaxSize = poolMaxSize; + for (int i = 0; i < corePoolSize; i++) { + Job job = new SmartToolJob(this.dataMgr); + jobQueue.add(job); + } + this.jobList = new CopyOnWriteArrayList(); + } + + /** + * Enqueue the specified request into the job pool's request queue. Will be + * worked by first available job. + * + * @param request + * SmartToolRequest containing information on procedure to + * execute. + */ + public void schedule(SmartToolRequest request) { + // do not schedule while canceling(cancel should be fast). + synchronized (cancelLock) { + if (cancel) { + return; + } + // do not schedule while joining, join might be slow but the javaDoc + // warns others. + synchronized (joinLock) { + if (!isJobAvailable()) { + Job job = jobQueue.poll(); + if ((job == null) && (jobList.size() < poolMaxSize)) { + job = new SmartToolJob(dataMgr); + } + if (job != null) { + job.schedule(); + jobList.add(job); + } + } + workQueue.offer(request); + } + } + } + + private boolean isJobAvailable() { + for (Job job : jobList) { + SmartToolJob toolJob = (SmartToolJob) job; + if (!toolJob.isRunning()) { + return true; + } + } + + return false; + } + + /** + * Join on the Jobs in the pool. Attempting to schedule other Jobs will + * block until join has returned so be careful when calling + */ + public void join() { + synchronized (joinLock) { + for (Job j : jobList) { + try { + j.join(); + } catch (InterruptedException e) { + // Ignore interupt + } + } + } + } + + /** + * Cancel the job pool, will clear out the workQueue then join on all jobs + * running. Once canceled all future calls to schedule will be ignored. + */ + public void cancel() { + cancel(true); + } + + /** + * Cancel the job pool, will clear out the workQueue and optionally join + * running jobs. Once canceled all future calls to schedule will be ignored. + * + * @param join + * true if you want to join before returning. + */ + public void cancel(boolean join) { + synchronized (cancelLock) { + cancel = true; + workQueue.clear(); + for (Job j : jobList) { + j.cancel(); + } + } + if (join) { + join(); + } + } + + /** + * Cancels the specified request. Returns true if the provided request was + * waiting to be run but now is not. Returns false if the provided request + * is already running or if it was not enqueued to begin with. + * + * @param request + * The request to cancel. + * @return True, if the request was in the queue. False, if it was already + * being worked by the pool or if it was not in the queue. 
+ */ + public boolean cancel(SmartToolRequest request) { + return workQueue.remove(request); + } + + /** + * A job pool is considered active if any of the jobs it contains are + * servicing a request or there is still requests to be worked off in the + * queue. + * + * @return If any jobs are working off a request or there are requests still + * in the work queue. + */ + public boolean isActive() { + if (!workQueue.isEmpty()) { + return true; + } + for (Job job : jobList) { + SmartToolJob toolJob = (SmartToolJob) job; + if (toolJob.isRunning()) { + return true; + } + } + return false; + } + + /** + * Get the number requests remaining in the queue and the number of jobs in + * the pool currently working off a request. + * + * @return The number requests remaining in the queue and the number of jobs + * in the pool currently working off a request. + */ + public int[] getWorkRemaining() { + int jobsRunning = 0; + for (Job job : jobList) { + SmartToolJob toolJob = (SmartToolJob) job; + if (toolJob.isRunning()) { + jobsRunning++; + } + } + + return new int[] { jobsRunning, workQueue.size() }; + } + + protected class SmartToolJob extends Job { + + private final IUFStatusHandler statusHandler = UFStatus + .getHandler(SmartToolJob.class); + + private SmartToolController python; + + private final DataManager dataMgr; + + private volatile boolean running; + + public SmartToolJob(DataManager dataMgr) { + super("GFE Smart Tool Job"); + this.dataMgr = dataMgr; + this.running = false; + setSystem(true); + } + + @Override + protected IStatus run(IProgressMonitor monitor) { + try { + python = SmartToolFactory.buildController(dataMgr); + } catch (JepException e) { + jobList.remove(this); + statusHandler.error("Error initializing procedure python", e); + return new Status(IStatus.ERROR, StatusConstants.PLUGIN_ID, + "Error initializing procedure python", e); + } + + IStatus statusCode = Status.OK_STATUS; + try { + while (!monitor.isCanceled()) { + try { + SmartToolRequest request = null; + try { + request = workQueue.poll( + TimeUtil.MILLIS_PER_SECOND, + TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + statusCode = Status.CANCEL_STATUS; + break; + } + + if (monitor.isCanceled()) { + statusCode = Status.CANCEL_STATUS; + break; + } + + if (request != null) { + running = true; + + python.processFileUpdates(); + if (monitor.isCanceled()) { + statusCode = Status.CANCEL_STATUS; + break; + } + + Object retVal = null; + try { + execute(python, request, monitor); + retVal = null; + } catch (Throwable t) { + String toolName = request.getPreview() + .getEditAction().getItemName(); + statusHandler.error("Error running smart tool " + + toolName, t); + retVal = t; + } finally { + if (request.getPreview() != null) { + dataMgr.getEditActionProcessor() + .wrapUpExecute( + request.getPreview(), true); + } + request.requestComplete(retVal); + running = false; + } + } + } catch (Throwable t) { + statusHandler.error( + "Unhandled exception in SmartToolJob.", t); + } + } + } finally { + if (python != null) { + python.dispose(); + python = null; + } + } + + return statusCode; + } + + /** + * Executes a smart tool. 
+ * + * @param controller + * @param request + * @param monitor + * @throws SmartToolException + */ + private void execute(SmartToolController controller, + SmartToolRequest request, IProgressMonitor monitor) + throws SmartToolException { + EditAction ea = request.getPreview().getEditAction(); + String toolName = ea.getItemName(); + + Job progressJob = new AsyncProgressJob(toolName, this); + progressJob.schedule(); + IStatus pjStatus = Status.CANCEL_STATUS; + + try { + if (request.getOuterLevel()) { + dataMgr.getParmOp().clearUndoParmList(); + } + Tool tool = new Tool(dataMgr.getParmManager(), request + .getPreview().getParm(), ea.getItemName(), python); + tool.execute(ea.getItemName(), request.getPreview().getParm(), + ea.getRefSet(), ea.getTimeRange(), + request.getVarDict(), ea.getMissingDataMode(), monitor); + pjStatus = Status.OK_STATUS; + } catch (SmartToolException e) { + pjStatus = new Status(IStatus.WARNING, Activator.PLUGIN_ID, + "Error in smart tool " + toolName, e); + throw e; + } finally { + controller.garbageCollect(); + progressJob.done(pjStatus); + } + } + + public boolean isRunning() { + return running; + } + } +} diff --git a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolSelectionDlg.java b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolSelectionDlg.java index 2646891061..40f88306ec 100644 --- a/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolSelectionDlg.java +++ b/cave/com.raytheon.viz.gfe/src/com/raytheon/viz/gfe/smarttool/script/SmartToolSelectionDlg.java @@ -31,17 +31,18 @@ import com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg; /** * Dynamic GUI for showing smart tools' Variable Lists and running the tools - * + * *
- *
+ * 
  * SOFTWARE HISTORY
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Feb 9, 2010            njensen     Initial creation
  * Jun 25, 2013  16065    ryu         Passing outerLevel to tool job
- *
+ * Dec 10, 2013  #2367    dgilling    Use new SmartToolJobPool.
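Alongside schedule(), the pool exposes the status queries that replace the old static SmartToolJob.haveJobs() and getJobCount() helpers; a small sketch, assuming the getSmartToolJobPool() accessor the patch routes callers through, with the println purely illustrative:

    SmartToolJobPool pool = dataMgr.getSmartToolJobPool();
    if (pool.isActive()) {
        // work[0]: jobs currently running; work[1]: requests still queued.
        int[] work = pool.getWorkRemaining();
        System.out.println(work[0] + " running, " + work[1] + " queued");
    }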
+ * 
  * 
- * + * * @author njensen * @version 1.0 */ @@ -55,20 +56,20 @@ public class SmartToolSelectionDlg extends SelectionDlg { /* * (non-Javadoc) - * + * * @see com.raytheon.viz.gfe.ui.runtimeui.SelectionDlg#run() */ @Override public void run() { PreviewInfo pi = SmartUtil.checkAndBuildPreview(dataMgr, name); if (pi != null) { - SmartToolRequest req = SmartUtil. - buildSmartToolRequest(dataMgr, pi, true); + SmartToolRequest req = SmartUtil.buildSmartToolRequest(dataMgr, pi, + true); if (req != null) { String varDict = dataMgr.getSmartToolInterface() .transformVarDict(getValues()); req.setVarDict(varDict); - SmartToolJob.enqueue(dataMgr, req); + dataMgr.getSmartToolJobPool().schedule(req); } } } diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/GeneralGridData.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/GeneralGridData.java index 862861e277..86f3a21327 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/GeneralGridData.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/GeneralGridData.java @@ -54,11 +54,14 @@ import com.vividsolutions.jts.geom.Coordinate; * * SOFTWARE HISTORY * - * Date Ticket# Engineer Description - * ------------ ---------- ----------- -------------------------- - * Mar 9, 2011 bsteffen Initial creation - * Aug 27, 2013 #2287 randerso Removed 180 degree adjustment required by error - * in Maputil.rotation + * Date Ticket# Engineer Description + * ------------- -------- ----------- -------------------------- + * Mar 9, 2011 bsteffen Initial creation + * Aug 27, 2013 2287 randerso Removed 180 degree adjustment required by error + * in Maputil.rotation + * Dec 09, 2013 2617 bsteffen Added 180 degree rotation into reproject + * so wind direction is calculated as + * direction wind is coming from. * * * @@ -249,8 +252,29 @@ public class GeneralGridData { Coordinate ll = new Coordinate(dp.x, dp.y); double rot = MapUtil.rotation(ll, newGeom); double rot2 = MapUtil.rotation(ll, gridGeometry); - double cos = Math.cos(Math.toRadians(rot - rot2)); - double sin = Math.sin(Math.toRadians(rot - rot2)); + /* + * When code calls into this method, the observed state + * of things is that u and v represent the direction + * the vector is going while mag and dir represent + * the direction the vector is coming from. The extra + * 180 here makes everything consistently represent the + * direction the vector is coming from so that when the + * barbs or arrows are rendered the mag and dir are + * calculated as expected. Overall this is a completely + * rediculous way of doing things. During construction + * everything should be forced to represent the vector + * consistently and we should only be keeping either + * u/v or mag/dir to minimize memory consumption. + * Unfortunately that is a significant change which is + * made high risk by the fact no one documents which + * areas are expecting vectors oriented to vs from. So + * for now I(bsteffen) have chosen to simply add in 180 + * so that the behavior will be exactly as it was before + * 2287 because even though it is rediculous it is a well + * tested rediculous(theoretically). 
+ */ + double cos = Math.cos(Math.toRadians(rot - rot2 + 180)); + double sin = Math.sin(Math.toRadians(rot - rot2 + 180)); double u = udata[index]; double v = vdata[index]; udata[index] = (float) (cos * u - sin * v); diff --git a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/ui/xy/RadarXsectXYResource.java b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/ui/xy/RadarXsectXYResource.java index b8f096de02..8384c081fc 100644 --- a/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/ui/xy/RadarXsectXYResource.java +++ b/cave/com.raytheon.viz.radar/src/com/raytheon/viz/radar/ui/xy/RadarXsectXYResource.java @@ -58,6 +58,7 @@ import com.vividsolutions.jts.geom.LineString; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Aug 17, 2010 bsteffen Initial creation + * Dec 11, 2013 DR 16795 D. Friedman Transform pixel coordinate in inspect * * * @@ -213,11 +214,13 @@ public class RadarXsectXYResource extends RadarXYResource implements DrawableImage image = images.get(displayedDate); try { Coordinate c = latLon.asLatLon(); + double[] worldCoord = descriptor.pixelToWorld(new double[] { + c.x, c.y }); IExtent extent = image.getCoverage().getExtent(); // Convert the screen coordinate to a coordinate within the image. // 0,0 is the upper left and 1,1 is the lower right of the iamge. - double xRat = (c.x - extent.getMinX()) / extent.getWidth(); - double yRat = (c.y - extent.getMinY()) / extent.getHeight(); + double xRat = (worldCoord[0] - extent.getMinX()) / extent.getWidth(); + double yRat = (worldCoord[1] - extent.getMinY()) / extent.getHeight(); return super.inspect(new ReferencedCoordinate(new Coordinate(xRat, yRat))); } catch (Exception e) { diff --git a/edexOsgi/build.edex/esb/conf/modes.xml b/edexOsgi/build.edex/esb/conf/modes.xml index 86d13b4bcd..babb2749d6 100644 --- a/edexOsgi/build.edex/esb/conf/modes.xml +++ b/edexOsgi/build.edex/esb/conf/modes.xml @@ -118,6 +118,11 @@ manualIngest-common.xml manualIngest-spring.xml shef-ingest.xml + persist-ingest.xml + obs-common.xml + obs-ingest.xml + metartohmdb-plugin.xml + pointdata-common.xml shef-common.xml ohd-common.xml alarmWhfs-spring.xml @@ -139,6 +144,8 @@ q2FileProcessor-spring.xml satpre-spring.xml purge-logs.xml + fssobs-ingest.xml + fssobs-common.xml ohd-common.xml @@ -147,6 +154,7 @@ alertviz-request.xml auth-common.xml auth-request.xml + persist-request.xml menus-request.xml utility-request.xml management-common.xml diff --git a/edexOsgi/com.raytheon.edex.plugin.bufrua/src/com/raytheon/edex/plugin/bufrua/decoder/BUFRUAManLevelAdapter.java b/edexOsgi/com.raytheon.edex.plugin.bufrua/src/com/raytheon/edex/plugin/bufrua/decoder/BUFRUAManLevelAdapter.java index 395e8060eb..83cb6d0052 100644 --- a/edexOsgi/com.raytheon.edex.plugin.bufrua/src/com/raytheon/edex/plugin/bufrua/decoder/BUFRUAManLevelAdapter.java +++ b/edexOsgi/com.raytheon.edex.plugin.bufrua/src/com/raytheon/edex/plugin/bufrua/decoder/BUFRUAManLevelAdapter.java @@ -21,7 +21,12 @@ package com.raytheon.edex.plugin.bufrua.decoder; import static com.raytheon.uf.edex.decodertools.bufr.packets.DataPacketTypes.RepSubList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import com.raytheon.uf.common.dataplugin.bufrua.LayerTools; import com.raytheon.uf.common.dataplugin.bufrua.UAObs; @@ -42,6 +47,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao; * ------------- -------- ----------- -------------------------- 
* Mar 03, 2008 969 jkorman Initial implementation. * Dec 05, 2013 2612 bsteffen Fix max wind decoding. + * Dec 17, 2013 2639 bsteffen Validate mandatory level heights. * * * @@ -50,6 +56,20 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao; */ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { + /** Mandatory pressure levels */ + private static final float[] VALID_PR = { 100000, 92500, 85000, 70000, + 50000, 40000, 30000, 25000, 20000, 15000, 10000, 5000 }; + + /** Reasonable height levels corresponding to VALID_PR */ + private static final float[] VALID_HT = { 100, 750, 1450, 3000, 5550, 7150, + 9150, 10350, 11800, 13600, 16150, 20000 }; + + /** Map VALID_PR to VALID_HT values. */ + private static final Map VALID_HEIGHT_MAP = generateValidHeights(); + + /** Reasonable range for reasonable heights in VALID_HT */ + private static final float VALID_HEIGHT_RANGE = 1000; + /** * * @param pdd @@ -98,7 +118,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { int maxManLevels = -1; int maxTropLevels = -1; - float sfcPressure = -9999; + float sfcPressure = PDV_FILL_INT; Dimension[] dims = getPointDataDescription().dimensions; for (Dimension d : dims) { @@ -120,21 +140,21 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { List p = (List) packet .getValue(); int sig = getInt(p.get(1), IDecoderConstants.VAL_MISSING); - double pres = getDouble(p.get(0), -9999); + double pres = getDouble(p.get(0), PDV_FILL_DBL); switch (sig) { case LayerTools.TROP_LEVEL: { // Tropopause level if ((tropIdx < maxTropLevels) && (pres > 0) && (pres != 99900.0)) { setViewData("prTrop", view, p.get(0), tropIdx); - double t = getDouble(p.get(3), -9999); - if (t < -9999) { - t = -9999.0; + double t = getDouble(p.get(3), PDV_FILL_DBL); + if (t < PDV_FILL_DBL) { + t = PDV_FILL_DBL; } view.setFloat("tpTrop", (float) t, tropIdx); - t = getDouble(p.get(4), -9999); - if (t < -9999) { - t = -9999.0; + t = getDouble(p.get(4), PDV_FILL_DBL); + if (t < PDV_FILL_DBL) { + t = PDV_FILL_DBL; } view.setFloat("tdTrop", (float) t, tropIdx); setViewData("wdTrop", view, p.get(5), tropIdx); @@ -144,7 +164,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { break; } case LayerTools.SFC_LEVEL: { - sfcPressure = (float) getDouble(p.get(0), -9999); + sfcPressure = (float) getDouble(p.get(0), PDV_FILL_DBL); // fall through } case LayerTools.MANPRE_LEVEL: { @@ -152,14 +172,14 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { if ((manIdx < maxManLevels) && (pres > 0)) { setViewData("prMan", view, p.get(0), manIdx); setViewData("htMan", view, p.get(2), manIdx); - double t = getDouble(p.get(3), -9999); - if (t < -9999) { - t = -9999.0; + double t = getDouble(p.get(3), PDV_FILL_DBL); + if (t < PDV_FILL_DBL) { + t = PDV_FILL_DBL; } view.setFloat("tpMan", (float) t, manIdx); - t = getDouble(p.get(4), -9999); - if (t < -9999) { - t = -9999.0; + t = getDouble(p.get(4), PDV_FILL_DBL); + if (t < PDV_FILL_DBL) { + t = PDV_FILL_DBL; } view.setFloat("tdMan", (float) t, manIdx); setViewData("wdMan", view, p.get(5), manIdx); @@ -168,12 +188,13 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { } break; } - // No default! + // No default! 
} // switch } // for view.setInt("numMand", manIdx); view.setInt("numTrop", tropIdx); view.setFloat("sfcPressure", sfcPressure); + removeInvalidHeights(view); } return pointData; } @@ -209,7 +230,7 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { .getValue(); int sig = getInt(p.get(1), IDecoderConstants.VAL_MISSING); if (sig == LayerTools.MAXWND_LEVEL) { - double pres = getDouble(p.get(0), -9999); + double pres = getDouble(p.get(0), PDV_FILL_DBL); if (pres > 0) { setViewData("prMaxW", view, p.get(0), maxWindIdx); setViewData("wdMaxW", view, p.get(2), maxWindIdx); @@ -225,4 +246,77 @@ public class BUFRUAManLevelAdapter extends AbstractBUFRUAAdapter { } return pointData; } + + /** + * Check the heights for each reading, removing invalid readings. Check + * that heights are within the range specified from the mean value and that + * they are between the preceeding and following values. + * + * One reason this is needed is because there is a known error in the + * encoded data when the height for the 250MB level is less than 10000. For + * these cases the encoder is prepending a 1 so a height of 9990 becomes + * 19990. It appears this may be an artifact of the compression used to + * encode the heights. For this case it would be theoretically possible to + * remove the extra 1 and treat the data as valid, but invalidating the + * height is done because it is not clear if this would always be a safe + * fix or if there are other possible errors to detect. + * + * @param view + * {@link PointDataView} which will be modified to have invalid + * mandataory hight data removed. + */ + private void removeInvalidHeights(PointDataView view) { + int numMand = view.getInt("numMand"); + if (numMand < 3) { + return; + } + /* Convert pressure and height data into a map for easy access. */ + Number[] pr = view.getNumberAllLevels("prMan"); + Number[] ht = view.getNumberAllLevels("htMan"); + Map heights = new HashMap(numMand * 2); + for (int i = 0; i < numMand; i += 1) { + heights.put(pr[i].floatValue(), ht[i].floatValue()); + } + /* Check each predefined level. */ + Set invalidPrLevels = new HashSet(); + for (int i = 1; i < VALID_PR.length - 1; i += 1) { + float prLevel = VALID_PR[i]; + float validHt = VALID_HEIGHT_MAP.get(prLevel); + float minHt = validHt - VALID_HEIGHT_RANGE; + float maxHt = validHt + VALID_HEIGHT_RANGE; + Float testHt = heights.get(prLevel); + /* First detect values which don't look reasonable. */ + if (testHt != null && testHt > PDV_FILL_INT + && (minHt > testHt || maxHt < testHt)) { + float prevPr = VALID_PR[i - 1]; + float nextPr = VALID_PR[i + 1]; + Float prevHt = heights.get(prevPr); + Float nextHt = heights.get(nextPr); + /* Next check if its at least ascending. 
*/ + if (prevHt != null && prevHt > PDV_FILL_INT && nextHt != null + && nextHt > PDV_FILL_INT + && (testHt < prevHt || testHt > nextHt)) { + invalidPrLevels.add(prLevel); + } + } + } + + if (invalidPrLevels.isEmpty()) { + return; + } + + for (int i = 0; i < numMand; i += 1) { + if (invalidPrLevels.contains(pr[i].floatValue())) { + view.setFloat("htMan", PDV_FILL_INT, i); + } + } + } + + private static Map generateValidHeights() { + Map validHeights = new HashMap(); + for (int i = 0; i < VALID_HT.length; i += 1) { + validHeights.put(VALID_PR[i], VALID_HT[i]); + } + return Collections.unmodifiableMap(validHeights); + } } diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java index 3e9020f91a..feeb19d645 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/GridParmManager.java @@ -108,7 +108,6 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger; * 10/02/13 #2444 randerso Fix error handling when creating IFPGridDatabases. * DO NOT ATTEMPT TO MERGE THIS CHANGE INTO 14.2 as the GFE * server code has been significantly refactored. - * 12/03/13 #2595 randerso Added check for null update time in commitGrid * * * @@ -595,8 +594,7 @@ public class GridParmManager { // if update time is less than publish time, grid has not // changed since last published, therefore only update // history, do not publish - if ((gdh.getUpdateTime() == null) - || (gdh.getPublishTime() == null) + if ((gdh.getPublishTime() == null) || (gdh.getUpdateTime().getTime() > gdh .getPublishTime().getTime()) // in service backup, times on srcHistory could diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java index a44ad62234..6f47e5e22d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/IFPGridDatabase.java @@ -86,6 +86,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.PerformanceStatus; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; +import com.raytheon.uf.common.time.SimulatedTime; import com.raytheon.uf.common.time.TimeRange; import com.raytheon.uf.common.time.util.ITimer; import com.raytheon.uf.common.time.util.TimeUtil; @@ -119,6 +120,7 @@ import com.vividsolutions.jts.geom.Coordinate; * 03/20/13 #1774 randerso Cleanup code to use proper constructors * 04/08/13 #1949 rjpeter Updated to work with normalized database. 
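For reference, the two-stage rule implemented by removeInvalidHeights above reduces to a few lines. The following self-contained sketch is illustrative only (the class name, sample values, and plain float[] input are hypothetical; the real code reads PointDataView levels and fill values):

    import java.util.HashSet;
    import java.util.Set;

    public class HeightCheckSketch {
        /** Reasonable heights for the mandatory levels, ascending with index. */
        static final float[] VALID_HT = { 100, 750, 1450, 3000, 5550, 7150,
                9150, 10350, 11800, 13600, 16150, 20000 };

        static final float VALID_HEIGHT_RANGE = 1000;

        /** Returns indices of levels failing both the range and ascent checks. */
        static Set<Integer> invalidLevels(float[] heights) {
            Set<Integer> bad = new HashSet<Integer>();
            // Skip the first and last level so both neighbors exist.
            for (int i = 1; i < VALID_HT.length - 1; i++) {
                float h = heights[i];
                boolean outOfRange = (h < VALID_HT[i] - VALID_HEIGHT_RANGE)
                        || (h > VALID_HT[i] + VALID_HEIGHT_RANGE);
                // Invalidate only when the height also breaks monotonic ascent.
                boolean notAscending = (h < heights[i - 1])
                        || (h > heights[i + 1]);
                if (outOfRange && notAscending) {
                    bad.add(i);
                }
            }
            return bad;
        }

        public static void main(String[] args) {
            float[] heights = VALID_HT.clone();
            heights[7] = 19990; // the known 250MB error: 9990 encoded as 19990
            System.out.println(invalidLevels(heights)); // prints [7]
        }
    }

Requiring both checks to fail keeps soundings with unusual but internally consistent heights intact; only values that are both climatologically implausible and non-monotonic are dropped.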
* 05/02/13 #1969 randerso Removed updateDbs from parent class + * 12/10/13 #2611 randerso Change saveGridData to set update time when saving grids + * + * + * @author bphillip @@ -964,6 +966,14 @@ public class IFPGridDatabase extends GridDatabase { // track merge with existing records or add to new list for (GFERecord recToSave : recordsToSave) { + // modify update time for non ISC/Official db + if (!this.dbId.getModelName().equals("ISC") + && !this.dbId.getModelName().equals("Official")) { + Date nowTime = SimulatedTime.getSystemTime().getTime(); + for (GridDataHistory history : recToSave.getGridHistory()) { + history.setUpdateTime(nowTime); + } + } TimeRange tr = recToSave.getTimeRange(); GFERecord existing = existingMap.get(tr); if (existing != null) { diff --git a/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/metar/MetarPointDataTransform.java b/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/metar/MetarPointDataTransform.java index 8ca48f7421..ee1e76c41e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/metar/MetarPointDataTransform.java +++ b/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/metar/MetarPointDataTransform.java @@ -60,6 +60,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools; * May 09, 2013 1869 bsteffen Modified D2D time series of point data to * work without dataURI. * Aug 30, 2013 2298 rjpeter Make getPluginName abstract + * Dec 16, 2013 DR 16920 D. Friedman Fix type of tempFromTenths access. * * * @@ -398,7 +399,7 @@ public class MetarPointDataTransform { mr.setTemperature(pdv.getNumber(TEMPERATURE).intValue()); mr.setDewPoint(pdv.getNumber(DEWPOINT).intValue()); - mr.setTempFromTenths(pdv.getNumber(TEMP_FROM_TENTHS).intValue()); + mr.setTempFromTenths(pdv.getNumber(TEMP_FROM_TENTHS).floatValue()); mr.setDewPointFromTenths(pdv.getNumber(DP_FROM_TENTHS).floatValue()); mr.setMinTemp6Hour(pdv.getNumber(MIN_TEMP6_HOUR).floatValue()); diff --git a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-common.xml b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-common.xml index 0fdf5c4a1c..eb21ea0ed3 100--- a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-common.xml +++ b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-common.xml @@ -56,5 +56,10 @@ - + + + + + \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml index 945f5599a4..f2c2cdc1bc 100644 --- a/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml +++ b/edexOsgi/com.raytheon.edex.plugin.text/res/spring/text-ingest.xml @@ -14,11 +14,6 @@ - - - - @@ -52,6 +47,10 @@ + + + + * * @author * @version 1 */ public class TextDao extends DefaultPluginDao { + private static final int fullPurgeInterval; + + static { + String fullPurgeProperty = System.getProperty( + "text.fullVersionPurge.intervalhours", "3"); + Integer val = null; + try { + val = Integer.parseInt(fullPurgeProperty); + if ((val < 1) || (val > 23)) { + // out of range (0 would divide by zero below); use default + val = new Integer(3); + } + } catch (Exception e) { + val = new Integer(3); + } + fullPurgeInterval = val.intValue(); + } public TextDao(String pluginName) throws PluginException { super(pluginName); @@ -71,7 +89,7 @@ public class TextDao extends DefaultPluginDao { // only do full purge every few hours since incremental purge runs every // minute - if (Calendar.getInstance().get(Calendar.HOUR_OF_DAY) % 3 == 0) { + if ((TimeUtil.newGmtCalendar().get(Calendar.HOUR_OF_DAY)
% fullPurgeInterval) == 0) { TextDB.purgeStdTextProducts(); } @@ -79,10 +97,9 @@ public class TextDao extends DefaultPluginDao { "text"); } - @SuppressWarnings("unchecked") @Override - public List getRecordsToArchive( - Calendar insertStartTime, Calendar insertEndTime) + public int processArchiveRecords(Calendar insertStartTime, + Calendar insertEndTime, IDatabaseProcessor processor) throws DataAccessLayerException { StdTextProductDao dao = new StdTextProductDao(true); DatabaseQuery dbQuery = new DatabaseQuery(dao.getDaoClass()); @@ -91,8 +108,9 @@ public class TextDao extends DefaultPluginDao { dbQuery.addQueryParam("insertTime", insertEndTime, QueryOperand.LESSTHAN); dbQuery.addOrder("insertTime", true); + dbQuery.addOrder("refTime", true); - return (List) dao.queryByCriteria(dbQuery); + return this.processByCriteria(dbQuery, processor); } @Override diff --git a/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java index b795ffb25e..f9448db854 100644 --- a/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java +++ b/edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/maintenance/archiver/TextArchiveFileNameFormatter.java @@ -19,15 +19,7 @@ **/ package com.raytheon.edex.plugin.text.maintenance.archiver; -import java.util.ArrayList; -import java.util.Calendar; import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider; import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject; @@ -35,7 +27,6 @@ import com.raytheon.uf.common.dataplugin.text.db.StdTextProduct; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter; -import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.plugin.PluginDao; /** @@ -47,8 +38,9 @@ import com.raytheon.uf.edex.database.plugin.PluginDao; * * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 20, 2012 dgilling Initial creation - * Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter. + * Apr 20, 2012 dgilling Initial creation + * Nov 05, 2013 2499 rjpeter Moved IPluginArchiveFileNameFormatter. + * Dec 13, 2013 2555 rjpeter Refactored. 
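The interval read in the TextDao static block above drives a simple hour-of-day gate: a full version purge runs only when the current GMT hour is an exact multiple of the configured interval, while the incremental purge continues every minute. A small illustrative sketch of that cadence (class and method names are hypothetical):

    public class PurgeGateSketch {
        /** True when a full purge should run in the given GMT hour. */
        static boolean fullPurgeDue(int hourOfDay, int intervalHours) {
            return (hourOfDay % intervalHours) == 0;
        }

        public static void main(String[] args) {
            // With the default 3 hour interval, hours 0, 3, 6, ... qualify.
            StringBuilder hours = new StringBuilder();
            for (int hour = 0; hour < 24; hour++) {
                if (fullPurgeDue(hour, 3)) {
                    hours.append(hour).append(' ');
                }
            }
            System.out.println(hours); // 0 3 6 9 12 15 18 21
        }
    }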
* * * @author dgilling @@ -65,60 +57,26 @@ public class TextArchiveFileNameFormatter implements * (non-Javadoc) * * @see - * com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter - * #getPdosByFile(java.lang.String, - * com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map, - * java.util.Calendar, java.util.Calendar) + * com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter#getFilename + * (java.lang.String, com.raytheon.uf.edex.database.plugin.PluginDao, + * com.raytheon.uf.common.dataplugin.persist.PersistableDataObject) */ - @SuppressWarnings("rawtypes") @Override - public Map> getPdosByFile( - String pluginName, PluginDao dao, - Map> pdoMap, - Calendar startTime, Calendar endTime) - throws DataAccessLayerException { - List pdos = dao.getRecordsToArchive(startTime, - endTime); + public String getFilename(String pluginName, PluginDao dao, + PersistableDataObject pdo) { + String path = null; + if (pdo instanceof StdTextProduct) { + StdTextProduct casted = (StdTextProduct) pdo; - Set newFileEntries = new HashSet(); - if ((pdos != null) && !pdos.isEmpty()) { - if (pdos.get(0) instanceof StdTextProduct) { - for (PersistableDataObject pdo : pdos) { - StdTextProduct casted = (StdTextProduct) pdo; - - // no refTime to use, so we use creation time - Date time = new Date(casted.getRefTime()); - String path = pluginName - + DefaultPathProvider.fileNameFormat.get().format( - time); - - newFileEntries.add(path); - List list = pdoMap.get(path); - if (list == null) { - list = new ArrayList(pdos.size()); - pdoMap.put(path, list); - } - list.add(pdo); - } - } else { - statusHandler.error("Invalid PersistableDataObject class " - + pdos.get(0).getClass() - + "sent to TextArchiveFileNameFormatter to archive"); - } + // no refTime to use, so we use creation time + Date time = new Date(casted.getRefTime()); + path = pluginName + + DefaultPathProvider.fileNameFormat.get().format(time); + } else { + statusHandler.error("Invalid PersistableDataObject class " + + pdo.getClass() + + "sent to TextArchiveFileNameFormatter to archive"); } - - Iterator iter = pdoMap.keySet().iterator(); - Map> pdosToSave = new HashMap>( - pdoMap.size() - newFileEntries.size()); - - while (iter.hasNext()) { - String key = iter.next(); - if (!newFileEntries.contains(key)) { - pdosToSave.put(key, pdoMap.get(key)); - iter.remove(); - } - } - - return pdosToSave; + return path; } } diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java index 3c82b24d5e..0943772df9 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/ArchiveConfigManager.java @@ -31,7 +31,6 @@ import java.util.Arrays; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -88,6 +87,8 @@ import com.raytheon.uf.common.util.FileUtil; * Jul 24, 2013 2221 rferrel Changes for select configuration. * Aug 06, 2013 2224 rferrel Changes to use DataSet. * Aug 28, 2013 2299 rferrel purgeExpiredFromArchive now returns the number of files purged. + * Dec 04, 2013 2603 rferrel Changes to improve archive purging. + * Dec 17, 2013 2603 rjpeter Fix directory purging. 
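To see what getFilename above produces, assume the provider's thread-local formatter uses its usual "-yyyy-MM-dd-HH" GMT pattern (an assumption here; the exact pattern lives in DefaultPathProvider). A text product is then grouped into one archive file per plugin and creation hour:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class FilenameSketch {
        public static void main(String[] args) {
            // Mirrors DefaultPathProvider.fileNameFormat (assumed pattern).
            SimpleDateFormat fmt = new SimpleDateFormat("-yyyy-MM-dd-HH");
            fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
            Date refTime = new Date(1387291500000L); // Dec 17 2013 14:45 GMT
            System.out.println("text" + fmt.format(refTime)); // text-2013-12-17-14
        }
    }

Grouping by hour means all products sharing a creation hour land in the same archive file, which is what the dup-elimination logic later in this patch relies on.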
* * * @author rferrel @@ -189,23 +190,31 @@ public class ArchiveConfigManager { String fileName = ArchiveConstants.selectFileName(Type.Retention, null); SelectConfig selections = loadSelection(fileName); if ((selections != null) && !selections.isEmpty()) { - try { - for (ArchiveSelect archiveSelect : selections.getArchiveList()) { - ArchiveConfig archiveConfig = archiveMap.get(archiveSelect - .getName()); - for (CategorySelect categorySelect : archiveSelect - .getCategorySelectList()) { - CategoryConfig categoryConfig = archiveConfig - .getCategory(categorySelect.getName()); - categoryConfig.setSelectedDisplayNames(categorySelect - .getSelectList()); - } + for (ArchiveSelect archiveSelect : selections.getArchiveList()) { + String archiveName = archiveSelect.getName(); + ArchiveConfig archiveConfig = archiveMap.get(archiveName); + if (archiveConfig == null) { + statusHandler.handle(Priority.WARN, + "Archive Configuration [" + archiveName + + "] not found. Skipping selections."); + continue; + } + + for (CategorySelect categorySelect : archiveSelect + .getCategorySelectList()) { + String categoryname = categorySelect.getName(); + CategoryConfig categoryConfig = archiveConfig + .getCategory(categoryname); + if (categoryConfig == null) { + statusHandler.handle(Priority.WARN, + "Archive Configuration [" + archiveName + + "] Category [" + categoryname + + "] not found. Skipping selections."); + continue; + } + categoryConfig.setSelectedDisplayNames(categorySelect + .getSelectSet()); } - } catch (NullPointerException ex) { - statusHandler - .handle(Priority.ERROR, - "Retention selection and Archive configuration no longer in sync: ", - ex); } } return archiveMap.values(); @@ -285,7 +294,8 @@ public class ArchiveConfigManager { /** * Purge the Files that fall outside of the time frame constraints for the - * Archive. + * archive. This will always leave the archive's top level directories even + * when they are empty. * * @param archive * @return purgeCount @@ -293,107 +303,244 @@ public class ArchiveConfigManager { public int purgeExpiredFromArchive(ArchiveConfig archive) { String archiveRootDirPath = archive.getRootDir(); File archiveRootDir = new File(archiveRootDirPath); - - String[] topLevelDirs = archiveRootDir.list(); - - List topLevelDirsNotPurged = new ArrayList(); int purgeCount = 0; - if (topLevelDirs != null) { - topLevelDirsNotPurged.addAll(Arrays.asList(topLevelDirs)); - topLevelDirs = null; + if (!archiveRootDir.isDirectory()) { + statusHandler.error(archiveRootDir.getAbsolutePath() + + " not a directory."); + return purgeCount; } + if (statusHandler.isPriorityEnabled(Priority.INFO)) { + statusHandler.info("Purging directory: \"" + + archiveRootDir.getAbsolutePath() + "\"."); + } + + if (statusHandler.isPriorityEnabled(Priority.DEBUG)) { + String message = String.format( + "Start setup of category date helpers for archive: %s.", + archive.getName()); + statusHandler.debug(message); + } + + Map helperMap = new HashMap(); for (CategoryConfig category : archive.getCategoryList()) { - Calendar purgeTime = calculateExpiration(archive, category); - CategoryFileDateHelper helper = new CategoryFileDateHelper( - category, archive.getRootDir()); - IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils - .fileFileFilter(), new FileDateFilter(null, purgeTime, - helper)); - - // Remove the directory associated with this category from the not - // purged list since it is being purged. 
- for (Iterator iter = topLevelDirsNotPurged.iterator(); iter - .hasNext();) { - String dirName = iter.next(); - if (helper.isCategoryDirectory(dirName)) { - iter.remove(); - break; - } - } - for (DisplayData display : getDisplayData(archive.getName(), - category.getName(), true)) { - List displayFiles = getDisplayFiles(display, null, - purgeTime); - for (File file : displayFiles) { - purgeCount += purgeFile(file, fileDateFilter); - } - } + CategoryFileDateHelper helper = new CategoryFileDateHelper(category); + helperMap.put(category, helper); } - // check for other expired in top level directories not covered - // by the categories in the archive. - Calendar defaultPurgeTime = calculateExpiration(archive, null); - IOFileFilter fileDateFilter = FileFilterUtils.and(FileFilterUtils - .fileFileFilter(), new FileDateFilter(null, defaultPurgeTime)); - for (String topDirName : topLevelDirsNotPurged) { - File topLevelDir = new File(archiveRootDir, topDirName); + if (statusHandler.isPriorityEnabled(Priority.DEBUG)) { + String message = String.format( + "End setup of category date helpers for archive: %s.", + archive.getName()); + statusHandler.debug(message); + } - // Keep both top level hidden files and hidden directories. - if (!topLevelDir.isHidden()) { - purgeCount += purgeFile(topLevelDir, fileDateFilter); + final Calendar minPurgeTime = calculateExpiration(archive, null); + + IOFileFilter defaultTimeFilter = new IOFileFilter() { + + @Override + public boolean accept(File dir, String name) { + File file = new File(dir, name); + return accept(file); + } + + @Override + public boolean accept(File file) { + Calendar time = TimeUtil.newGmtCalendar(); + time.setTimeInMillis(file.lastModified()); + return time.compareTo(minPurgeTime) < 0; + } + }; + + File[] topLevelFiles = archiveRootDir.listFiles(); + for (File topFile : topLevelFiles) { + // In top level directory ignore all hidden files and directories. 
+ if (!topFile.isHidden()) { + if (topFile.isDirectory()) { + boolean isInCategory = false; + for (CategoryConfig category : archive.getCategoryList()) { + CategoryFileDateHelper helper = helperMap.get(category); + + if (helper.isCategoryDirectory(topFile.getName())) { + isInCategory = true; + if (statusHandler.isPriorityEnabled(Priority.INFO)) { + String message = String + .format("Start purge of category %s - %s, directory \"%s\".", + archive.getName(), + category.getName(), + topFile.getAbsolutePath()); + statusHandler.info(message); + } + + final Calendar extPurgeTime = calculateExpiration( + archive, category); + int pc = purgeDir(topFile, defaultTimeFilter, + minPurgeTime, extPurgeTime, helper, + category); + purgeCount += pc; + if (statusHandler.isPriorityEnabled(Priority.INFO)) { + String message = String + .format("End purge of category %s - %s, directory \"%s\", deleted %d files and directories.", + archive.getName(), + category.getName(), + topFile.getAbsolutePath(), pc); + statusHandler.info(message); + } + break; + } + } + if (isInCategory == false) { + if (statusHandler.isPriorityEnabled(Priority.INFO)) { + String message = String.format( + "Start purge of directory: \"%s\".", + topFile.getAbsolutePath()); + statusHandler.info(message); + } + int pc = purgeDir(topFile, defaultTimeFilter); + purgeCount += pc; + if (statusHandler.isPriorityEnabled(Priority.INFO)) { + String message = String + .format("End purge of directory: \"%s\", deleted %d files and directories.", + topFile.getAbsolutePath(), pc); + statusHandler.info(message); + } + } + } else { + if (defaultTimeFilter.accept(topFile)) { + purgeCount += deleteFile(topFile); + } + } } } return purgeCount; } /** - * Recursive method for purging files. Never pass in a directory you do not - * want deleted when purging makes it an empty directory. + * Purge expired data from a directory's contents, possibly leaving an + * empty directory. * - * @param fileToPurge - * @param filter - * @return purgeCount number of files and directories purged + * @param dir + * @param defaultTimeFilter + * @param minPurgeTime + * @param extPurgeTime + * @param helper + * @param category + * @return purgeCount */ - private int purgeFile(File fileToPurge, IOFileFilter filter) { + private int purgeDir(File dir, IOFileFilter defaultTimeFilter, + Calendar minPurgeTime, Calendar extPurgeTime, + CategoryFileDateHelper helper, CategoryConfig category) { int purgeCount = 0; - if (fileToPurge.isFile() && filter.accept(fileToPurge)) { - if (fileToPurge.delete()) { - ++purgeCount; - if (statusHandler.isPriorityEnabled(Priority.DEBUG)) { - statusHandler.debug("Purged file: \"" - + fileToPurge.getAbsolutePath() + "\""); - } - } else { - statusHandler.warn("Failed to purge file: " - + fileToPurge.getAbsolutePath()); - } - } else if (fileToPurge.isDirectory() && !fileToPurge.isHidden()) { - // Purge only visible directories. - File[] expiredFilesInDir = fileToPurge.listFiles(); - - for (File dirFile : expiredFilesInDir) { - purgeCount += purgeFile(dirFile, filter); - } - - // Attempt to delete empty directory.
- if ((purgeCount >= expiredFilesInDir.length) - && (fileToPurge.list().length == 0)) { - if (!fileToPurge.delete()) { - statusHandler.warn("Failed to purge directory: " - + fileToPurge.getAbsolutePath()); - } else { - ++purgeCount; - if (statusHandler.isPriorityEnabled(Priority.DEBUG)) { - statusHandler.debug("Purged directory: \"" - + fileToPurge.getAbsolutePath() - + File.separator + "\""); + for (File file : dir.listFiles()) { + if (!file.isHidden()) { + DataSetStatus status = helper.getFileDate(file); + if (status.isInDataSet()) { + Collection<String> labels = category + .getSelectedDisplayNames(); + boolean isSelected = false; + for (String label : status.getDisplayLabels()) { + if (labels.contains(label)) { + isSelected = true; + break; + } } + + Calendar checkTime = (isSelected ? extPurgeTime + : minPurgeTime); + Calendar fileTime = status.getTime(); + boolean purge = fileTime.compareTo(checkTime) < 0; + + if (statusHandler.isPriorityEnabled(Priority.DEBUG)) { + String message = String + .format("%s [%s] category [%s] %s retention [%s] checkTime [%s] = %s.", + (file.isDirectory() ? "Directory" + : "File"), file + .getAbsoluteFile(), category + .getName(), (isSelected ? "ext" + : "min"), TimeUtil + .formatCalendar(checkTime), + TimeUtil.formatCalendar(fileTime), + (purge ? "purge" : "retain")); + statusHandler.debug(message); + } + + if (purge) { + if (file.isDirectory()) { + purgeCount += purgeDir(file, + FileFilterUtils.trueFileFilter()); + if (file.list().length == 0) { + // delete the now empty directory itself + purgeCount += deleteFile(file); + } + } else { + purgeCount += deleteFile(file); + } + } + } else if (file.isDirectory()) { + purgeCount += purgeDir(file, defaultTimeFilter, + minPurgeTime, extPurgeTime, helper, category); + if (file.list().length == 0) { + purgeCount += deleteFile(file); + } + } else if (defaultTimeFilter.accept(file)) { + purgeCount += deleteFile(file); + } } } + + return purgeCount; + } + + /** + * Recursively purge the contents of a directory based on the filter. The + * directory in the initial call is not deleted. This may result in an empty + * directory, which is the desired result for top level directories. + * + * + * @param dir + * @param fileDataFilter + * @return purgeCount + */ + private int purgeDir(File dir, IOFileFilter fileDataFilter) { + int purgeCount = 0; + for (File file : dir.listFiles()) { + if (!file.isHidden()) { + if (file.isDirectory()) { + purgeCount += purgeDir(file, fileDataFilter); + if (file.list().length == 0) { + purgeCount += deleteFile(file); + } + } else if (fileDataFilter.accept(file)) { + purgeCount += deleteFile(file); + } + } + } + return purgeCount; + } + + /** + * Delete a file or directory. + * + * @param file + * @return purgeCount + */ + private int deleteFile(File file) { + int purgeCount = 0; + boolean isDir = file.isDirectory(); + if (file.delete()) { + ++purgeCount; + if (statusHandler.isPriorityEnabled(Priority.DEBUG)) { + statusHandler + .debug(String.format("Purged %s: \"%s\"", + (isDir ? "directory" : "file"), + file.getAbsolutePath())); + } + } else { + statusHandler.warn(String.format("Failed to purge %s: \"%s\"", + (isDir ?
"directory" : "file"), file.getAbsolutePath())); + } return purgeCount; } @@ -644,39 +791,60 @@ public class ArchiveConfigManager { * @param categoryConfig * @return dirs */ - private List getDirs(File rootFile, CategoryDataSet dataSet) { - List resultDirs = new ArrayList(); + private Map> getDirs(File rootFile, + CategoryConfig categoryConfig) { + List resultDirs = null; List dirs = new ArrayList(); List tmpDirs = new ArrayList(); List swpDirs = null; + List dataSets = categoryConfig.getDataSetList(); + Map> rval = new HashMap>( + dataSets.size(), 1); - for (String dirPattern : dataSet.getDirPatterns()) { - String[] subExpr = dirPattern.split(File.separator); - dirs.clear(); - dirs.add(rootFile); - tmpDirs.clear(); + // keep an in memory map since some of the categories cause the same + // directories to be listed over and over + Map> polledDirs = new HashMap>(); - for (String regex : subExpr) { - Pattern subPattern = Pattern.compile("^" + regex + "$"); - IOFileFilter filter = FileFilterUtils - .makeDirectoryOnly(new RegexFileFilter(subPattern)); + for (CategoryDataSet dataSet : dataSets) { + resultDirs = new LinkedList(); - for (File dir : dirs) { - File[] list = dir.listFiles(); - if (list != null) { - List dirList = Arrays.asList(list); - tmpDirs.addAll(Arrays.asList(FileFilterUtils.filter( - filter, dirList))); - } - } - swpDirs = dirs; - dirs = tmpDirs; - tmpDirs = swpDirs; + for (String dirPattern : dataSet.getDirPatterns()) { + String[] subExpr = dirPattern.split(File.separator); + dirs.clear(); + dirs.add(rootFile); tmpDirs.clear(); + + for (String regex : subExpr) { + Pattern subPattern = Pattern.compile("^" + regex + "$"); + IOFileFilter filter = FileFilterUtils + .makeDirectoryOnly(new RegexFileFilter(subPattern)); + + for (File dir : dirs) { + List dirList = polledDirs.get(dir); + if (dirList == null) { + File[] list = dir.listFiles(); + dirList = Arrays.asList(list); + polledDirs.put(dir, dirList); + } + + if (dirList != null) { + tmpDirs.addAll(FileFilterUtils.filterList(filter, + dirList)); + } + } + + swpDirs = dirs; + dirs = tmpDirs; + tmpDirs = swpDirs; + tmpDirs.clear(); + } + + resultDirs.addAll(dirs); } - resultDirs.addAll(dirs); + rval.put(dataSet, resultDirs); } - return resultDirs; + + return rval; } /** @@ -701,10 +869,11 @@ public class ArchiveConfigManager { categoryName); File rootFile = new File(rootDirName); TreeMap displays = new TreeMap(); + Map> dirMap = getDirs(rootFile, + categoryConfig); for (CategoryDataSet dataSet : categoryConfig.getDataSetList()) { List dataSetDirPatterns = dataSet.getDirPatterns(); - - List dirs = getDirs(rootFile, dataSet); + List dirs = dirMap.get(dataSet); int beginIndex = rootFile.getAbsolutePath().length() + 1; List patterns = new ArrayList( diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryDataSet.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryDataSet.java index a68e9f887f..a36177d1f4 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryDataSet.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryDataSet.java @@ -43,7 +43,8 @@ import com.raytheon.uf.common.time.util.TimeUtil; * ------------ ---------- ----------- -------------------------- * Aug 6, 2013 #2224 rferrel Initial creation * Oct 02, 2013 #2147 rferrel Allow Date to ignore hour in time stamp. - * + * Dec 10, 2013 #2624 rferrel Added Julian date. 
+ * Dec 17, 2013 2603 rjpeter Clear low order time fields on time generation. * * * @author rferrel @@ -52,22 +53,26 @@ import com.raytheon.uf.common.time.util.TimeUtil; @XmlAccessorType(XmlAccessType.NONE) @XmlRootElement(name = "dataSet") public class CategoryDataSet { - public static final int YEAR_INDEX = 0; + private static final int YEAR_INDEX = 0; - public static final int MONTH_INDEX = 1; + private static final int MONTH_INDEX = 1; - public static final int DAY_INDEX = 2; + private static final int DAY_OF_YEAR_INDEX = 1; - public static final int HOUR_INDEX = 3; + private static final int DAY_INDEX = 2; - public static final int TIMESTAMP_INDEX = 0; + private static final int JULIAN_HOUR_INDEX = 2; + + private static final int HOUR_INDEX = 3; + + private static final int TIMESTAMP_INDEX = 0; /** * Types of times and the number of indices for getting the time stamp from * patterns. */ public static enum TimeType { - Date(4), EpochSec(1), EpochMS(1), File(0); + Date(4), EpochSec(1), EpochMS(1), File(0), Julian(3); private final int numIndices; @@ -199,7 +204,8 @@ public class CategoryDataSet { * @return true when only the dirPatterns should be used. */ public boolean isDirOnly() { - return filePattern == null || filePattern.equals(".*"); + return (filePattern == null) || (filePattern.length() == 0) + || ".*".equals(filePattern); } /** @@ -249,6 +255,7 @@ public class CategoryDataSet { } fileCal.set(year, month, day, hour, 0, 0); + fileCal.set(Calendar.MILLISECOND, 0); fileTime = fileCal.getTimeInMillis(); break; case EpochMS: @@ -263,6 +270,42 @@ public class CategoryDataSet { case File: fileTime = null; break; + case Julian: + Calendar julainCal = TimeUtil.newGmtCalendar(); + int jYear = Integer.parseInt(matcher + .group(timeIndices[CategoryDataSet.YEAR_INDEX])); + int jDay = Integer.parseInt(matcher + .group(timeIndices[CategoryDataSet.DAY_OF_YEAR_INDEX])); + + // When two digit year determine century. + if (jYear < 100) { + int cYear = julainCal.get(Calendar.YEAR); + jYear += (cYear - (cYear % 100)); + julainCal.add(Calendar.YEAR, 1); + int nextYear = julainCal.get(Calendar.YEAR); + + // If date too far into the future back up a century. + if ((jYear > nextYear) || ((jYear == nextYear) && (jDay > 31))) { + jYear -= 100; + } + } + + julainCal.set(Calendar.YEAR, jYear); + julainCal.set(Calendar.DAY_OF_YEAR, jDay); + + // Default to last hour of the day. 
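The per-scan listing cache added to getDirs above avoids re-listing a directory when several data sets share leading path patterns. A self-contained sketch of that memoization (hypothetical class name; note the null guard, since File.listFiles() returns null for unreadable paths):

    import java.io.File;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DirListingCache {
        private final Map<File, List<File>> polledDirs =
                new HashMap<File, List<File>>();

        /** Returns the cached child list, listing the directory on first use. */
        public List<File> list(File dir) {
            List<File> children = polledDirs.get(dir);
            if (children == null) {
                File[] raw = dir.listFiles();
                // listFiles() returns null for unreadable paths and plain
                // files; fold that into an empty list so callers need no
                // null check.
                children = (raw == null) ? Collections.<File> emptyList()
                        : Arrays.asList(raw);
                polledDirs.put(dir, children);
            }
            return children;
        }
    }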
+ int jHour = 23; + if (timeIndices[CategoryDataSet.JULIAN_HOUR_INDEX] >= 0) { + jHour = Integer.parseInt(matcher + .group(timeIndices[CategoryDataSet.JULIAN_HOUR_INDEX])); + } + julainCal.set(Calendar.HOUR_OF_DAY, jHour); + julainCal.set(Calendar.MINUTE, 0); + julainCal.set(Calendar.SECOND, 0); + julainCal.set(Calendar.MILLISECOND, 0); + fileTime = julainCal.getTimeInMillis(); + break; + default: fileTime = null; break; diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryFileDateHelper.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryFileDateHelper.java index 6f87b7e73d..441eb00c7d 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryFileDateHelper.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/CategoryFileDateHelper.java @@ -20,14 +20,14 @@ package com.raytheon.uf.common.archive.config; import java.io.File; +import java.text.FieldPosition; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.io.FilenameUtils; - import com.raytheon.uf.common.time.util.TimeUtil; /** @@ -42,7 +42,8 @@ import com.raytheon.uf.common.time.util.TimeUtil; * Jun 21, 2013 1965 bgonzale Initial creation * Aug 03, 2013 2224 rferrel Changes for new configuration files. * Aug 28, 2013 2299 rferrel Changes in IFileDateHelper. - * + * Dec 04, 2013 2603 rferrel Changes to improve archive purging. + * Dec 17, 2013 2603 rjpeter Fix file data pattern matching. * * * @author bgonzale @@ -54,16 +55,27 @@ public class CategoryFileDateHelper implements IFileDateHelper { * Date information derived from each of a Category's dirPatterns. */ private static class CategoryDateInfo { + /** Always use the same field postion. */ + private static final FieldPosition pos0 = new FieldPosition(0); + + /** Pattern used to get the date. */ private final Pattern datePattern; + /** Pattern for getting top level directories. */ private final Pattern categoryTopLevelDirPattern; + /** The type of type stamp being used. */ private final CategoryDataSet.TimeType timeType; - private final boolean isDirOnly; - + /** Indices in the pattern group used to get the time stamp. */ private final int[] timeIndices; + /** The format used to get the display label. */ + private final String displayLabelFormat; + + /** Formatter used to get display label. */ + private final MessageFormat msgfmt; + /** * Initialization constructor. * @@ -73,24 +85,46 @@ public class CategoryFileDateHelper implements IFileDateHelper { * @param monthIndex * @param dayIndex * @param hourIndex + * @param displayLabelFormat */ public CategoryDateInfo(Pattern datePattern, Pattern categoryTopLevelDirPattern, - CategoryDataSet.TimeType timeType, boolean isDirOnly, - int[] timeIndices) { + CategoryDataSet.TimeType timeType, int[] timeIndices, + String displayLabelFormat) { this.datePattern = datePattern; this.categoryTopLevelDirPattern = categoryTopLevelDirPattern; this.timeType = timeType; - this.isDirOnly = isDirOnly; this.timeIndices = timeIndices; + this.displayLabelFormat = displayLabelFormat; + if (displayLabelFormat != null) { + this.msgfmt = new MessageFormat(this.displayLabelFormat); + } else { + this.msgfmt = null; + } } + /** + * Get the display label from the matcher. This assumes the matcher is a + * pattern match for the date pattern. 
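The century inference in the Julian branch above can be isolated as follows. The sketch mirrors the patch logic (which derives the next year via Calendar.add) using plain integers, and the sample calls assume "now" is in 2013:

    public class JulianCenturySketch {
        /** Resolve a two digit year against the current year. */
        static int inferYear(int twoDigitYear, int dayOfYear, int currentYear) {
            // Push the year into the current century.
            int year = twoDigitYear + (currentYear - (currentYear % 100));
            int nextYear = currentYear + 1;
            // Anything landing past January of next year is assumed to be
            // from a century ago.
            if ((year > nextYear) || ((year == nextYear) && (dayOfYear > 31))) {
                year -= 100;
            }
            return year;
        }

        public static void main(String[] args) {
            System.out.println(inferYear(99, 10, 2013)); // 1999
            System.out.println(inferYear(14, 20, 2013)); // 2014, early next year
            System.out.println(inferYear(14, 40, 2013)); // 1914, too far ahead
        }
    }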
+ * + * @param matcher + * @return label + */ + public String getDisplayLabel(Matcher matcher) { + // Unable to use StringBuilder with MessageFormat. + StringBuffer sb = new StringBuffer(); + String[] args = new String[matcher.groupCount() + 1]; + args[0] = matcher.group(); + for (int i = 1; i < args.length; ++i) { + args[i] = matcher.group(i); + } + String label = msgfmt.format(args, sb, pos0).toString(); + return label; + } } private final List dateInfoList; - private final String rootDir; - /** * Initialization constructor. * @@ -98,8 +132,7 @@ public class CategoryFileDateHelper implements IFileDateHelper { * @param rootDirPattern * categoryTopLevelDirPattern */ - public CategoryFileDateHelper(CategoryConfig config, String rootDir) { - this.rootDir = rootDir; + public CategoryFileDateHelper(CategoryConfig config) { List categoryDataSetList = config.getDataSetList(); int size = 0; for (CategoryDataSet dataSet : categoryDataSetList) { @@ -109,26 +142,26 @@ public class CategoryFileDateHelper implements IFileDateHelper { this.dateInfoList = new ArrayList( size); - boolean isDirOnly; CategoryDataSet.TimeType timeType; for (CategoryDataSet dataSet : categoryDataSetList) { - isDirOnly = dataSet.isDirOnly(); timeType = dataSet.getTimeType(); for (String patternString : dataSet.getDirPatterns()) { Pattern datePattern = dataSet.getPattern(patternString); int dirSeparatorIndex = patternString .indexOf(File.separatorChar); - patternString = dirSeparatorIndex > patternString.length() - || dirSeparatorIndex < 0 ? patternString + patternString = (dirSeparatorIndex > patternString.length()) + || (dirSeparatorIndex < 0) ? patternString : patternString.substring(0, dirSeparatorIndex); Pattern categoryTopLevelDirPattern = Pattern .compile(patternString); int[] timeIndices = dataSet.getTimeIndices(); + String displayLabelFormat = dataSet.getDisplayLabel(); + dateInfoList.add(new CategoryDateInfo(datePattern, - categoryTopLevelDirPattern, timeType, isDirOnly, - timeIndices)); + categoryTopLevelDirPattern, timeType, timeIndices, + displayLabelFormat)); } } } @@ -141,26 +174,19 @@ public class CategoryFileDateHelper implements IFileDateHelper { * .io.File) */ @Override - public Calendar getFileDate(File file) { + public DataSetStatus getFileDate(File file) { String filenamePath = file.getAbsolutePath(); - String pathForFilePatternCheck = filenamePath.substring(rootDir - .length()); - String pathForDirPatternCheck = FilenameUtils - .getFullPathNoEndSeparator(pathForFilePatternCheck); - Calendar result = null; Long timestamp = null; + DataSetStatus result = new DataSetStatus(file); for (CategoryDateInfo dateInfo : dateInfoList) { - Matcher matcher = null; - if (dateInfo.isDirOnly) { - matcher = dateInfo.datePattern.matcher(pathForDirPatternCheck); - } else { - matcher = dateInfo.datePattern.matcher(pathForFilePatternCheck); - } + Matcher matcher = dateInfo.datePattern.matcher(filenamePath); if (matcher.matches()) { timestamp = CategoryDataSet.getMatchTimeInMilliseconds( dateInfo.timeType, dateInfo.timeIndices, matcher); + result.setInDataSet(true); + result.addDisplayLabel(dateInfo.getDisplayLabel(matcher)); break; } } @@ -170,11 +196,9 @@ public class CategoryFileDateHelper implements IFileDateHelper { timestamp = file.lastModified(); } - // TODO future speed improvement refactor IFileDateHelper to have a - // method that returns a long instead of Calendar. That will prevent - // converting Calendar to long then back to a Calendar. 
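getDisplayLabel above turns regex capture groups into MessageFormat arguments, with {0} bound to the whole match and {1}..{n} to the groups. A runnable illustration (the pattern, input, and label format here are made up; real formats come from the data set's display label configuration):

    import java.text.MessageFormat;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class LabelSketch {
        public static void main(String[] args) {
            Pattern p = Pattern.compile("(\\w+)/radar/(\\w+)");
            Matcher m = p.matcher("koax/radar/Z");
            if (m.matches()) {
                Object[] groups = new Object[m.groupCount() + 1];
                groups[0] = m.group(); // {0} is the full match
                for (int i = 1; i < groups.length; i++) {
                    groups[i] = m.group(i);
                }
                System.out.println(MessageFormat.format("Radar {1} - {2}",
                        groups)); // prints: Radar koax - Z
            }
        }
    }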
- result = TimeUtil.newGmtCalendar(); - result.setTimeInMillis(timestamp); + Calendar time = TimeUtil.newGmtCalendar(); + time.setTimeInMillis(timestamp); + result.setTime(time); return result; } diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/DataSetStatus.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/DataSetStatus.java new file mode 100644 index 0000000000..622e354ff6 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/DataSetStatus.java @@ -0,0 +1,128 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.uf.common.archive.config; + +import java.io.File; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; + +/** + * This class is used by IFileDateHelper to contain additional information + * about a file. + *
+ * <pre>
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 4, 2013  2603      rferrel     Initial creation
+ * 
+ * </pre>
+ * + * @author rferrel + * @version 1.0 + */ + +public class DataSetStatus { + + /** The file the status is for. */ + private final File file; + + /** Set to true when the file is contained in a data set. */ + private boolean inDataSet = false; + + /** Display labels for the data sets the file matched. */ + private final List<String> displayLabels = new ArrayList<String>(1); + + /** The file's time based on IFileDateHelper. */ + private Calendar time = null; + + /** + * The constructor with default values set. + * + * @param file + * should not be null. + */ + DataSetStatus(File file) { + this.file = file; + } + + /** + * The file the information is for. + * + * @return file + */ + public File getFile() { + return file; + } + + /** + * + * @return true when file is in a data set. + */ + public boolean isInDataSet() { + return inDataSet; + } + + /** + * Set data set status. + * + * @param inDataSet + */ + public void setInDataSet(boolean inDataSet) { + this.inDataSet = inDataSet; + } + + /** + * + * @return displayLabels; non-empty only when the file is in a data set. + */ + public List<String> getDisplayLabels() { + return displayLabels; + } + + /** + * Add a display label. Should only be called when the file is in a data + * set. + * + * @param displayLabel + */ + public void addDisplayLabel(String displayLabel) { + this.displayLabels.add(displayLabel); + } + + /** + * The file's time. + * + * @return time + */ + public Calendar getTime() { + return time; + } + + /** + * Set the file's time. + * + * @param time + */ + public void setTime(Calendar time) { + this.time = time; + } +} diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/FileDateFilter.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/FileDateFilter.java deleted file mode 100644 index d349faf652..0000000000 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/FileDateFilter.java +++ /dev/null @@ -1,127 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.common.archive.config; - -import java.io.File; -import java.util.Calendar; - -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.io.filefilter.IOFileFilter; - -import com.raytheon.uf.common.time.util.TimeUtil; - -/** - * Filter files based on a file date parsed using the given file date helper. - * Accept returns true for files that fall between the Start and End times. If - * start is null, then all after start checks will return true. If end is null, - * then all before end checks will return true. - *
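A hypothetical caller of the new DataSetStatus API, mirroring how purgeDir above picks a retention cutoff (the free-standing method and names are illustrative):

    import java.io.File;
    import java.util.Calendar;
    import java.util.Set;

    import com.raytheon.uf.common.archive.config.DataSetStatus;
    import com.raytheon.uf.common.archive.config.IFileDateHelper;

    public class RetentionSketch {
        /**
         * Files whose labels are selected get the extended retention time;
         * everything else gets the minimum retention time.
         */
        static boolean shouldPurge(IFileDateHelper helper, File file,
                Set<String> selectedLabels, Calendar minPurgeTime,
                Calendar extPurgeTime) {
            DataSetStatus status = helper.getFileDate(file);
            boolean isSelected = false;
            for (String label : status.getDisplayLabels()) {
                if (selectedLabels.contains(label)) {
                    isSelected = true;
                    break;
                }
            }
            Calendar cutoff = isSelected ? extPurgeTime : minPurgeTime;
            // getTime() falls back to lastModified when no pattern matched.
            return status.getTime().compareTo(cutoff) < 0;
        }
    }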
- * <pre>
- * SOFTWARE HISTORY
- * 
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Jun 18, 2013 1965       bgonzale    Initial creation
- * Aug 28, 2013 2299       rferrel     Reject hidden directories.
- * 
- * </pre>
- * - * @author bgonzale - * @version 1.0 - */ - -public class FileDateFilter implements IOFileFilter { - - private IFileDateHelper helper; - - private final Calendar start; - - private final Calendar end; - - /** - * Initialization constructor. This filter uses file last modified time as - * the filter time. - * - * @param startDate - * @param endDate - */ - public FileDateFilter(Calendar start, Calendar end) { - this(start, end, DEFAULT_FILE_DATE_HELPER); - } - - /** - * Initialization constructor. - * - * @param startDate - * @param endDate - * @param helper - */ - public FileDateFilter(Calendar start, Calendar end, IFileDateHelper helper) { - this.helper = helper == null ? DEFAULT_FILE_DATE_HELPER : helper; - this.start = start; - this.end = end; - } - - /* - * (non-Javadoc) - * - * @see org.apache.commons.io.filefilter.IOFileFilter#accept(java.io.File) - */ - @Override - public boolean accept(File file) { - String filePath = file.getAbsolutePath(); - String dirName = FilenameUtils.getFullPath(filePath); - String fileName = FilenameUtils.getName(filePath); - return accept(new File(dirName), fileName); - } - - /* - * (non-Javadoc) - * - * @see org.apache.commons.io.filefilter.IOFileFilter#accept(java.io.File, - * java.lang.String) - */ - @Override - public boolean accept(File dir, String name) { - File file = new File(dir, name); - Calendar fileDate = helper.getFileDate(file); - boolean isAfterEqualsStart = start == null || fileDate.after(start) - || fileDate.equals(start); - boolean isBeforeEqualsEnd = end == null || fileDate.before(end) - || fileDate.equals(end); - return isAfterEqualsStart && isBeforeEqualsEnd; - } - - /** - * This File Date helper returns a file's last modified time. - */ - private static final IFileDateHelper DEFAULT_FILE_DATE_HELPER = new IFileDateHelper() { - @Override - public Calendar getFileDate(File file) { - // use file last modified date - long lastModifiedMillis = file.lastModified(); - Calendar result = TimeUtil.newGmtCalendar(); - result.setTimeInMillis(lastModifiedMillis); - return result; - } - }; - -} diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/IFileDateHelper.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/IFileDateHelper.java index 368e6fe6d4..553da5d085 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/IFileDateHelper.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/IFileDateHelper.java @@ -20,7 +20,6 @@ package com.raytheon.uf.common.archive.config; import java.io.File; -import java.util.Calendar; /** * Helper to get a file last modification date. @@ -33,7 +32,8 @@ import java.util.Calendar; * ------------ ---------- ----------- -------------------------- * Jun 21, 2013 bgonzale Initial creation * Aug 28, 2013 2299 rferrel Change getFileDate argument. - * + * Dec 04, 2013 2603 rferrel Changes to improve archive purging. + * Dec 17, 2013 2603 rjpeter Clean up imports. 
* * * @author bgonzale @@ -48,6 +48,6 @@ public interface IFileDateHelper { * @param file * @return calendar */ - public Calendar getFileDate(File file); + public DataSetStatus getFileDate(File file); } diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/SelectConfig.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/SelectConfig.java index babc5a51ae..0de3e8b925 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/SelectConfig.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/SelectConfig.java @@ -20,7 +20,9 @@ package com.raytheon.uf.common.archive.config; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -40,6 +42,7 @@ import com.raytheon.uf.common.archive.config.select.CategorySelect; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jul 19, 2013 2221 rferrel Initial creation + * Dec 11, 2013 2603 rferrel Make selections a set. * * * @@ -113,24 +116,24 @@ public class SelectConfig { } /** - * Get a list of selected display names for the archive and its category. + * Get a set of selected display names for the archive and its category. * * @param archiveName * @param categoryName * @return displayLabelList may be an empty list. */ - public List getSelectedList(String archiveName, String categoryName) { + public Set getSelectedSet(String archiveName, String categoryName) { ArchiveSelect archiveSelect = getArchive(archiveName); if (archiveSelect == null || archiveSelect.isEmpty()) { - return new ArrayList(0); + return new HashSet(0); } CategorySelect categorySelect = getCategorySelect(categoryName, archiveSelect); if (categorySelect == null || categorySelect.isEmpty()) { - return new ArrayList(0); + return new HashSet(0); } - List selected = categorySelect.getSelectList(); + Set selected = categorySelect.getSelectSet(); return selected; } diff --git a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/select/CategorySelect.java b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/select/CategorySelect.java index 5d4d2c5ec0..3e0f7103d7 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/select/CategorySelect.java +++ b/edexOsgi/com.raytheon.uf.common.archive/src/com/raytheon/uf/common/archive/config/select/CategorySelect.java @@ -20,7 +20,9 @@ package com.raytheon.uf.common.archive.config.select; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; @@ -38,6 +40,7 @@ import javax.xml.bind.annotation.XmlRootElement; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jul 19, 2013 2221 rferrel Initial creation + * Dec 11, 2013 2603 rferrel Selected now a Set. * * * @@ -57,7 +60,7 @@ public class CategorySelect { * List of selected labels. 
*/ @XmlElement(name = "selectedDisplayName") - private final List selectList = new ArrayList(); + private final Set selectSet = new HashSet(); public String getName() { return name; @@ -67,21 +70,21 @@ public class CategorySelect { this.name = name; } - public List getSelectList() { - return selectList; + public Set getSelectSet() { + return selectSet; } - public void setSelectList(List selectList) { - this.selectList.clear(); - this.selectList.addAll(selectList); + public void setSelectSet(Set selectList) { + this.selectSet.clear(); + this.selectSet.addAll(selectList); } public void add(String displayName) { - selectList.add(displayName); + selectSet.add(displayName); } public boolean isEmpty() { - return selectList.isEmpty(); + return selectSet.isEmpty(); } @Override @@ -89,7 +92,7 @@ public class CategorySelect { StringBuilder sb = new StringBuilder(); sb.append("CategorySelect [ name: ").append(getName()); sb.append("[ "); - for (String select : getSelectList()) { + for (String select : getSelectSet()) { sb.append("\"").append(select).append("\", "); } sb.append("]"); diff --git a/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml b/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml index aa18209389..473509382e 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.archive/res/spring/archive-spring.xml @@ -3,9 +3,7 @@ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd"> - - - + diff --git a/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties b/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties index 3a69251ba6..a7a334bc48 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties +++ b/edexOsgi/com.raytheon.uf.edex.archive/resources/com.raytheon.uf.edex.archive.cron.properties @@ -2,12 +2,15 @@ archive.enable=true # runs database and hdf5 archive for archive server to pull data from archive.cron=0+40+*+*+*+? +# path to store processed archive data +archive.path=/archive + # enable archive purge archive.purge.enable=true -# purge archives -archive.purge.cron=0+5+0/3+*+*+? +# when to purge archives +archive.purge.cron=0+5+0/2+*+*+? # compress database records -archive.compression.enable=true +archive.compression.enable=false # to disable a specific archive, use property archive.disable=pluginName,pluginName... #archive.disable=grid,text,acars \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java index 73449c929f..1d070faeaf 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DataArchiver.java @@ -45,6 +45,7 @@ import com.raytheon.uf.edex.core.dataplugin.PluginRegistry; * ------------ ---------- ----------- -------------------------- * Dec 16, 2011 rjpeter Initial creation * Nov 05, 2013 2499 rjpeter Repackaged, updated to use System properties. + * Dec 11, 2013 2555 rjpeter archivePath overridable via System properties. 
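Switching the selected display names from a List to a Set matters in two places: duplicates in a hand-edited selection file collapse on load, and the membership tests done during purging become constant time. A tiny demonstration of both properties:

    import java.util.HashSet;
    import java.util.Set;

    public class SelectSetSketch {
        public static void main(String[] args) {
            Set<String> selected = new HashSet<String>();
            selected.add("Z");
            selected.add("Z"); // duplicate selection is dropped
            System.out.println(selected.size());        // 1
            System.out.println(selected.contains("Z")); // true, O(1) lookup
        }
    }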
* * * @author rjpeter @@ -60,6 +61,8 @@ public class DataArchiver { // allows for disabling of specific plugins if desired private final static String DISABLE_PROPERTY = "archive.disable"; + private final static String PATH_PROPERTY = "archive.path"; + private final boolean ARCHIVING_ENABLED; private final Set DISABLED_PLUGINS; @@ -68,10 +71,9 @@ public class DataArchiver { private final List dataArchivers = new LinkedList(); - private String archivePath = null; + private final String archivePath; - public DataArchiver(String archivePath) { - this.archivePath = archivePath; + public DataArchiver() { ARCHIVING_ENABLED = Boolean.getBoolean(ENABLE_PROPERTY); String disabledPluginList = System.getProperty(DISABLE_PROPERTY); if (disabledPluginList != null) { @@ -83,6 +85,9 @@ public class DataArchiver { } else { DISABLED_PLUGINS = Collections.emptySet(); } + + // default to /archive + archivePath = System.getProperty(PATH_PROPERTY, "/archive"); } public void archivePlugins() { diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java new file mode 100644 index 0000000000..4bbb77c4d1 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiveProcessor.java @@ -0,0 +1,739 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. 
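The archive.disable property referenced above takes a comma separated plugin list; the parsing body is elided in this hunk. A hedged sketch of how such a list can be normalized into a set (whitespace tolerant, hypothetical helper):

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public class DisableListSketch {
        static Set<String> parse(String csv) {
            if ((csv == null) || csv.trim().isEmpty()) {
                return Collections.emptySet();
            }
            Set<String> disabled = new HashSet<String>();
            for (String name : csv.split(",")) {
                disabled.add(name.trim());
            }
            return disabled;
        }

        public static void main(String[] args) {
            // Contains grid, text, and acars (iteration order unspecified).
            System.out.println(parse("grid, text,acars"));
        }
    }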
+ **/ +package com.raytheon.uf.edex.archive; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Writer; +import java.text.DecimalFormat; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +import com.raytheon.uf.common.dataplugin.PluginDataObject; +import com.raytheon.uf.common.dataplugin.PluginProperties; +import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject; +import com.raytheon.uf.common.datastorage.DataStoreFactory; +import com.raytheon.uf.common.datastorage.IDataStore; +import com.raytheon.uf.common.datastorage.StorageException; +import com.raytheon.uf.common.datastorage.StorageProperties.Compression; +import com.raytheon.uf.common.serialization.SerializationException; +import com.raytheon.uf.common.serialization.SerializationUtil; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; +import com.raytheon.uf.common.status.UFStatus.Priority; +import com.raytheon.uf.common.util.FileUtil; +import com.raytheon.uf.edex.core.dataplugin.PluginRegistry; +import com.raytheon.uf.edex.database.plugin.PluginDao; +import com.raytheon.uf.edex.database.processor.IDatabaseProcessor; + +/** + * Receives records to be archived to disk. Records can be written over extended + * periods of time and so when writing, the previous records must be dup elim'd + * against the current set of data to handle database being updated. + * + *
+ * <pre>
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 10, 2013 2555       rjpeter     Initial creation
+ * 
+ * 
+ * </pre>
+    /**
+     * Checks file numbering on any directories that have been flagged. Also
+     * archives any associated hdf5 files.
+     */
+    @Override
+    public void finish() {
+        for (File dir : dirsToCheckNumbering) {
+            checkFileNumbering(dir);
+        }
+
+        if (!datastoreFilesToArchive.isEmpty()) {
+            statusHandler.info(pluginName + ": archiving "
+                    + datastoreFilesToArchive.size() + " hdf5 file(s)");
+            Compression compRequired = Compression.LZF;
+            PluginProperties props = PluginRegistry.getInstance()
+                    .getRegisteredObject(pluginName);
+
+            if ((props != null) && (props.getCompression() != null)) {
+                if (compRequired.equals(Compression.valueOf(props
+                        .getCompression()))) {
+                    // if plugin is already compressed to the correct level,
+                    // no additional compression required
+                    compRequired = null;
+                }
+            }
+
+            for (String dataStoreFile : datastoreFilesToArchive) {
+                IDataStore ds = DataStoreFactory.getDataStore(new File(FileUtil
+                        .join(pluginName, dataStoreFile)));
+                // all dataStore files should end with .h5
+                String destDir = (dataStoreFile.endsWith(".h5") ? dataStoreFile
+                        .substring(0, dataStoreFile.length() - 3)
+                        : dataStoreFile);
+
+                String outputDir = FileUtil.join(archivePath, pluginName,
+                        destDir) + File.separator;
+
+                try {
+                    if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+                        statusHandler.debug(pluginName
+                                + ": Archiving data store file "
+                                + dataStoreFile + " to " + outputDir);
+                    }
+
+                    // copy the changed hdf5 file, does repack if
+                    // compRequired, otherwise pure file copy
+                    ds.copy(outputDir, compRequired, null, 0, 0);
+                } catch (StorageException e) {
+                    statusHandler.handle(Priority.PROBLEM,
+                            e.getLocalizedMessage());
+                }
+            }
+            statusHandler.info(pluginName + ": hdf5 archiving complete");
+        }
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see
+     * com.raytheon.uf.edex.database.processor.IDatabaseProcessor#getBatchSize()
+     */
+    @Override
+    public int getBatchSize() {
+        return fetchSize;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see
+     * com.raytheon.uf.edex.database.processor.IDatabaseProcessor#setBatchSize
+     * (int)
+     */
+    @Override
+    public void setBatchSize(int fetchSize) {
+        this.fetchSize = fetchSize;
+    }
+
+    /**
+     * True if the processor had a failure during its execution.
+     * 
+     * @return true if a failure occurred, false otherwise
+     */
+    public boolean isFailed() {
+        return failed;
+    }
+
+    /**
+     * Reset any state fields so processor can be reused.
+     */
+    public void reset() {
+        datastoreFilesToArchive.clear();
+        filesCreatedThisSession.clear();
+        dirsToCheckNumbering.clear();
+        recordsSaved = 0;
+        failed = false;
+    }
+
+    /**
+     * @return the debugArchiver
+     */
+    public boolean isDebugArchiver() {
+        return debugArchiver;
+    }
+
+    /**
+     * @param debugArchiver
+     *            the debugArchiver to set
+     */
+    public void setDebugArchiver(boolean debugArchiver) {
+        this.debugArchiver = debugArchiver;
+    }
+
+    /**
+     * @return the compressDatabaseFiles
+     */
+    public boolean isCompressDatabaseFiles() {
+        return compressDatabaseFiles;
+    }
+
+    /**
+     * @param compressDatabaseFiles
+     *            the compressDatabaseFiles to set
+     */
+    public void setCompressDatabaseFiles(boolean compressDatabaseFiles) {
+        this.compressDatabaseFiles = compressDatabaseFiles;
+    }
+
+    /**
+     * @return the recordsSaved
+     */
+    public int getRecordsSaved() {
+        return recordsSaved;
+    }
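The archive files written below are named dirName.bin.N (optionally with a .gz suffix), and FILE_COUNT_PATTERN defined above recovers the sequence number N. A quick standalone check of that regex, using the exact expression from the class, with made-up file names:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class FileCountPatternDemo {
        // Same expression as FILE_COUNT_PATTERN above: base name, sequence
        // number, optional .gz suffix.
        private static final Pattern P = Pattern
                .compile("^(.*\\.bin\\.)(\\d+)(?:\\.gz)?$");

        public static void main(String[] args) {
            for (String name : new String[] { "20131210.bin.1",
                    "20131210.bin.12.gz", "20131210.bin" }) {
                Matcher m = P.matcher(name);
                if (m.matches()) {
                    // group(1) is the base, group(2) the file count
                    System.out.println(name + " -> count " + m.group(2));
                } else {
                    System.out.println(name + " -> not an archived bin file");
                }
            }
        }
    }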
+    /**
+     * Saves data in the pdo map to disk. The data in the pdoMap is dup elim'd
+     * against any previously written records.
+     * 
+     * @param pdoMap
+     * @throws SerializationException
+     * @throws IOException
+     */
+    protected void savePdoMap(Map<String, List<PersistableDataObject<?>>> pdoMap)
+            throws SerializationException, IOException {
+        StringBuilder baseDir = new StringBuilder(160);
+        Set<Object> identifierSet = null;
+
+        for (Map.Entry<String, List<PersistableDataObject<?>>> entry : pdoMap
+                .entrySet()) {
+            baseDir.setLength(0);
+            baseDir.append(archivePath).append(File.separator)
+                    .append(pluginName).append(File.separator)
+                    .append(entry.getKey()).append(File.separator);
+            File dir = new File(baseDir.toString());
+
+            if (!dir.exists()) {
+                if (!dir.mkdirs() && !dir.exists()) {
+                    throw new IOException("Cannot create directory "
+                            + baseDir.toString());
+                }
+            }
+
+            List<PersistableDataObject<?>> pdos = entry.getValue();
+            if (identifierSet == null) {
+                identifierSet = new HashSet<Object>(pdos.size(), 1);
+            } else {
+                identifierSet.clear();
+            }
+
+            for (PersistableDataObject<?> pdo : pdos) {
+                identifierSet.add(pdo.getIdentifier());
+            }
+
+            SortedMap<Integer, File> fileMap = getArchivedFiles(dir);
+            pdos = dupElimPreviousFiles(fileMap, pdos, identifierSet);
+
+            // if any records left in pdos, write to disk
+            if (pdos.size() > 0) {
+                int fileCount = 1;
+                if (!fileMap.isEmpty()) {
+                    fileCount += fileMap.lastKey();
+                }
+                File newFile = new File(dir, dir.getName() + BIN_FILE_EXT + "."
+                        + fileCount);
+                fileMap.put(fileCount, newFile);
+                writeDataToDisk(newFile, pdos);
+                filesCreatedThisSession.add(newFile.getAbsolutePath());
+
+                // check if we have added another digit and should add a 0 to
+                // previous numbers
+                String fileCountStr = Integer.toString(fileCount);
+                if (fileCountStr.startsWith("1") && fileCountStr.endsWith("0")) {
+                    dirsToCheckNumbering.add(dir);
+                }
+            }
+        }
+    }
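The duplicate elimination that savePdoMap delegates to dupElimPreviousFiles (next method) boils down to: collect the identifiers of the incoming batch, then drop any previously archived record whose identifier is being rewritten, so the updated copy wins. A self-contained sketch of just that merge step, with plain Strings standing in for the pdo identifiers:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Set;

    public class DupElimDemo {
        public static void main(String[] args) {
            // Identifiers of the records about to be written (the new set).
            Set<Object> incoming = new HashSet<Object>(Arrays.asList("id2", "id3"));

            // Records previously written to an archive file.
            List<String> onDisk = new ArrayList<String>(
                    Arrays.asList("id1", "id2"));

            // Remove any old record whose identifier is being rewritten, so
            // the updated copy in the new batch wins.
            Iterator<String> iter = onDisk.iterator();
            while (iter.hasNext()) {
                if (incoming.contains(iter.next())) {
                    iter.remove();
                }
            }
            System.out.println(onDisk); // [id1]
        }
    }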
+    /**
+     * Checks the pdos against the previously written pdos. If a previous pdo
+     * would be overwritten, its entry is deleted from the previous file and
+     * the file is rewritten. If the last file does not contain a full fetch
+     * set, pdos are appended to it up to the fetch size. Any pdos that still
+     * remain to be written are returned; otherwise an empty list is returned.
+     * 
+     * @param fileMap
+     * @param pdos
+     * @param identifierSet
+     * @return
+     * @throws IOException
+     * @throws SerializationException
+     */
+    protected List<PersistableDataObject<?>> dupElimPreviousFiles(
+            SortedMap<Integer, File> fileMap,
+            List<PersistableDataObject<?>> pdos, Set<Object> identifierSet)
+            throws IOException, SerializationException {
+        if (!fileMap.isEmpty()) {
+            Iterator<File> fileIter = fileMap.values().iterator();
+            while (fileIter.hasNext()) {
+                File dataFile = fileIter.next();
+
+                if (filesCreatedThisSession
+                        .contains(dataFile.getAbsolutePath())) {
+                    statusHandler
+                            .debug(pluginName
+                                    + ": Skipping dup check on data file created this session: "
+                                    + dataFile.getName());
+                    continue;
+                }
+
+                List<PersistableDataObject<?>> pdosFromDisk = readDataFromDisk(dataFile);
+                if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+                    statusHandler.debug(pluginName + ": Checking "
+                            + pdosFromDisk.size() + " old records from file: "
+                            + dataFile.getAbsolutePath());
+                }
+                Iterator<PersistableDataObject<?>> pdoIter = pdosFromDisk
+                        .iterator();
+                boolean needsUpdate = false;
+                int dupsRemoved = 0;
+                while (pdoIter.hasNext()) {
+                    PersistableDataObject<?> pdo = pdoIter.next();
+                    if (identifierSet.contains(pdo.getIdentifier())) {
+                        pdoIter.remove();
+                        needsUpdate = true;
+                        dupsRemoved++;
+                    }
+                }
+
+                if (statusHandler.isPriorityEnabled(Priority.DEBUG)
+                        && (dupsRemoved > 0)) {
+                    statusHandler.debug(pluginName + ": Removed " + dupsRemoved
+                            + " old records from file: "
+                            + dataFile.getAbsolutePath());
+                }
+
+                if (!fileIter.hasNext() && (pdosFromDisk.size() < fetchSize)) {
+                    // last file, add more data to it
+                    needsUpdate = true;
+                    int numToAdd = fetchSize - pdosFromDisk.size();
+                    numToAdd = Math.min(numToAdd, pdos.size());
+
+                    if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+                        statusHandler.debug(pluginName + ": Adding " + numToAdd
+                                + " records to file: "
+                                + dataFile.getAbsolutePath());
+                    }
+
+                    pdosFromDisk.addAll(pdos.subList(0, numToAdd));
+                    if (numToAdd < pdos.size()) {
+                        pdos = pdos.subList(numToAdd, pdos.size());
+                    } else {
+                        pdos = Collections.emptyList();
+                    }
+                }
+
+                if (needsUpdate) {
+                    if (!pdosFromDisk.isEmpty()) {
+                        writeDataToDisk(dataFile, pdosFromDisk);
+                    } else {
+                        dirsToCheckNumbering.add(dataFile.getParentFile());
+                        dataFile.delete();
+                        fileIter.remove();
+                    }
+                }
+            }
+        }
+
+        return pdos;
+    }
+
+    /**
+     * Reads the serialized data from file. If there is a problem reading the
+     * file it is renamed to .bad.
+     * 
+     * @param file
+     * @return
+     * @throws IOException
+     * @throws SerializationException
+     */
+    @SuppressWarnings("unchecked")
+    protected List<PersistableDataObject<?>> readDataFromDisk(File file)
+            throws IOException, SerializationException {
+        if (file.exists()) {
+            InputStream is = null;
+            boolean successful = false;
+            try {
+                if (file.getName().endsWith(GZIP_FILE_EXT)) {
+                    is = new GZIPInputStream(new FileInputStream(file),
+                            CHUNK_SIZE);
+                } else {
+                    is = new BufferedInputStream(new FileInputStream(file),
+                            CHUNK_SIZE);
+                }
+
+                List<PersistableDataObject<?>> rval = SerializationUtil
+                        .transformFromThrift(List.class, is);
+                successful = true;
+                return rval;
+            } finally {
+                if (!successful) {
+                    // couldn't read in file, move it to bad
+                    file.renameTo(new File(file.getAbsoluteFile() + ".bad"));
+                }
+                if (is != null) {
+                    try {
+                        is.close();
+                    } catch (IOException e) {
+                        statusHandler.error(pluginName
+                                + ": Error occurred closing input stream", e);
+                    }
+                }
+            }
+        }
+
+        return Collections.emptyList();
+    }
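writeDataToDisk (next method) picks its output stream by the compression flag: a GZIPOutputStream when database compression is on, otherwise a plain buffered stream, both sized by CHUNK_SIZE. A minimal standalone sketch of that selection, with the temp file and payload purely illustrative:

    import java.io.BufferedOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.util.zip.GZIPOutputStream;

    public class StreamSelectionDemo {
        private static final int CHUNK_SIZE = 8192;

        // Mirrors the choice writeDataToDisk makes below: gzip the payload
        // when compression is enabled, otherwise a plain buffered stream.
        static OutputStream open(File target, boolean compress) throws IOException {
            if (compress) {
                return new GZIPOutputStream(new FileOutputStream(target),
                        CHUNK_SIZE);
            }
            return new BufferedOutputStream(new FileOutputStream(target),
                    CHUNK_SIZE);
        }

        public static void main(String[] args) throws IOException {
            File f = File.createTempFile("demo", ".bin.gz");
            OutputStream os = open(f, true);
            try {
                os.write("payload".getBytes("UTF-8"));
            } finally {
                os.close();
            }
            System.out.println("wrote " + f.length() + " bytes to " + f);
        }
    }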
+    /**
+     * Dynamically serializes the pdos and writes them to file. If the file
+     * has a .gz extension and the database compression flag is not set, the
+     * .gz file will be deleted in favor of the uncompressed file. The reverse
+     * also holds true. This allows a file written under a different
+     * compression scheme to automatically be converted if rewritten out.
+     * 
+     * @param file
+     * @param pdos
+     * @throws IOException
+     * @throws SerializationException
+     */
+    protected void writeDataToDisk(File file,
+            List<PersistableDataObject<?>> pdos) throws IOException,
+            SerializationException {
+        OutputStream os = null;
+
+        File gzipFile = null;
+        File baseFile = null;
+        String fileAbsPath = file.getAbsolutePath();
+
+        if (fileAbsPath.endsWith(GZIP_FILE_EXT)) {
+            gzipFile = file;
+            baseFile = new File(fileAbsPath.substring(0,
+                    fileAbsPath.length() - 3));
+        } else {
+            baseFile = file;
+            gzipFile = new File(fileAbsPath + GZIP_FILE_EXT);
+        }
+
+        try {
+            if (!file.getParentFile().exists()) {
+                file.getParentFile().mkdirs();
+            }
+
+            if (compressDatabaseFiles) {
+                if (baseFile.exists()) {
+                    if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+                        statusHandler
+                                .debug(pluginName
+                                        + ": Database compression flag changed, deleting uncompressed file "
+                                        + baseFile.getAbsolutePath());
+                    }
+                    baseFile.delete();
+                }
+
+                os = new GZIPOutputStream(new FileOutputStream(gzipFile),
+                        CHUNK_SIZE);
+            } else {
+                if (gzipFile.exists()) {
+                    if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+                        statusHandler
+                                .debug(pluginName
+                                        + ": Database compression flag changed, deleting compressed file "
+                                        + gzipFile.getAbsolutePath());
+                    }
+                    gzipFile.delete();
+                }
+
+                os = new BufferedOutputStream(new FileOutputStream(baseFile),
+                        CHUNK_SIZE);
+            }
+
+            if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
+                statusHandler.debug(pluginName + ": Serializing " + pdos.size()
+                        + " records to file " + file.getAbsolutePath());
+            }
+
+            // Thrift serialize pdo list
+            SerializationUtil.transformToThriftUsingStream(pdos, os);
+            os.flush();
+        } finally {
+            if (os != null) {
+                try {
+                    os.close();
+                } catch (IOException e) {
+                    statusHandler.error(pluginName
+                            + ": Error occurred closing output stream", e);
+                }
+            }
+        }
+
+        if (debugArchiver) {
+            String debugPath = baseFile.getAbsolutePath() + ".debug";
+            dumpPdos(debugPath, pdos);
+        }
+    }
+
+    /**
+     * Dump the record information being archived to a file.
+     * 
+     * @param basePath
+     * @param pdos
+     */
+    private void dumpPdos(String basePath, List<PersistableDataObject<?>> pdos) {
+        Writer writer = null;
+        File dumpFile = null;
+
+        try {
+            int index = 0;
+            do {
+                index++;
+                dumpFile = new File(basePath + "." + index);
+            } while (dumpFile.exists());
+
+            Iterator<PersistableDataObject<?>> pdoIter = pdos.iterator();
+            writer = new BufferedWriter(new FileWriter(dumpFile));
+            statusHandler.info(String.format("%s: Dumping records to: %s",
+                    pluginName, dumpFile.getAbsolutePath()));
+
+            while (pdoIter.hasNext()) {
+                PersistableDataObject<?> pdo = pdoIter.next();
+                if (pdo instanceof PluginDataObject) {
+                    PluginDataObject pluginDataObject = (PluginDataObject) pdo;
+                    if (pluginDataObject.getId() != 0) {
+                        // otherwise was read from file and will be recorded in
+                        // a previous entry
+                        writer.write("" + pluginDataObject.getId() + ":");
+                        writer.write(pluginDataObject.getDataURI());
+                        writer.write("\n");
+                    }
+                } else {
+                    writer.write(pdo.getIdentifier().toString());
+                    writer.write("\n");
+                }
+            }
+        } catch (Exception e) {
+            statusHandler
+                    .handle(Priority.PROBLEM, pluginName
+                            + ": Unable to dump pdo data to debug file: "
+                            + (dumpFile != null ? dumpFile.getAbsolutePath()
+                                    : null), e);
+        } finally {
+            if (writer != null) {
+                try {
+                    writer.close();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+    /**
+     * Returns a map of the archived database files in the directory. The map
+     * is ordered by the file count in the file name.
+     * 
+     * @param baseDir
+     * @return
+     */
+    protected SortedMap<Integer, File> getArchivedFiles(File baseDir) {
+        File[] dirListing = baseDir.listFiles();
+        SortedMap<Integer, File> fileMap = new TreeMap<Integer, File>();
+
+        if ((dirListing != null) && (dirListing.length > 0)) {
+            for (File dataFile : dirListing) {
+                if (dataFile.isFile()) {
+                    String name = dataFile.getName();
+                    Matcher matcher = FILE_COUNT_PATTERN.matcher(name);
+                    if (matcher.matches()) {
+                        String fileNumStr = matcher.group(2);
+                        int fileNum = Integer.parseInt(fileNumStr);
+                        fileMap.put(fileNum, dataFile);
+                    }
+                }
+            }
+        }
+
+        return fileMap;
+    }
+
+    /**
+     * Checks database bin files in directory for consistency. If a file has
+     * been deleted or if the number of digits has increased, files are renamed
+     * to fill in holes as well as to have leading zeros as necessary.
+     * 
+     * @param dir
+     */
+    protected void checkFileNumbering(File dir) {
+        SortedMap<Integer, File> fileMap = getArchivedFiles(dir);
+        int nextFileCount = 1;
+        int size = fileMap.size();
+        StringBuilder formatString = new StringBuilder(4);
+        do {
+            formatString.append("0");
+            size /= 10;
+        } while (size > 0);
+
+        DecimalFormat format = new DecimalFormat(formatString.toString());
+
+        for (Map.Entry<Integer, File> entry : fileMap.entrySet()) {
+            int fileNum = entry.getKey();
+            File oldFile = entry.getValue();
+            String name = oldFile.getName();
+            Matcher m = FILE_COUNT_PATTERN.matcher(name);
+            if (m.matches()) {
+                String oldCountString = m.group(2);
+
+                if ((fileNum > nextFileCount)
+                        || (oldCountString.length() != formatString.length())) {
+                    // rename file to the next expected count, filling holes
+                    // and applying leading zeros
+                    String newFileName = m.group(1) + format.format(nextFileCount);
+                    if (name.endsWith(GZIP_FILE_EXT)) {
+                        newFileName += GZIP_FILE_EXT;
+                    }
+
+                    File newFile = new File(oldFile.getParent(), newFileName);
+                    oldFile.renameTo(newFile);
+                }
+
+                nextFileCount++;
+            }
+        }
+    }
+}
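checkFileNumbering above builds its zero-padding pattern by appending one "0" per decimal digit of the file count, then formats with DecimalFormat. A standalone check of that arithmetic, with the file count of 12 chosen for illustration:

    import java.text.DecimalFormat;

    public class RenumberDemo {
        public static void main(String[] args) {
            // Twelve archived files need two-digit numbering, so build a "00"
            // pattern the same way checkFileNumbering does above.
            int size = 12;
            StringBuilder formatString = new StringBuilder(4);
            do {
                formatString.append("0");
                size /= 10;
            } while (size > 0);

            DecimalFormat format = new DecimalFormat(formatString.toString());
            System.out.println(format.format(3));  // 03
            System.out.println(format.format(12)); // 12
        }
    }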
diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java
index 74d256d8cd..f9a4e8af2c 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java
+++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/DatabaseArchiver.java
@@ -19,47 +19,20 @@
  **/
 package com.raytheon.uf.edex.archive;
 
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.TimeZone;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
 
 import com.raytheon.uf.common.dataplugin.PluginDataObject;
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.PluginProperties;
-import com.raytheon.uf.common.dataplugin.persist.IPersistable;
-import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
-import com.raytheon.uf.common.datastorage.DataStoreFactory;
-import com.raytheon.uf.common.datastorage.IDataStore;
-import com.raytheon.uf.common.datastorage.StorageException;
-import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
-import com.raytheon.uf.common.serialization.SerializationException;
-import com.raytheon.uf.common.serialization.SerializationUtil;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.time.util.TimeUtil;
-import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
@@ -70,7 +43,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;
 
 /**
- * This class handles moving processed data to the archiver directory.
+ * This class handles saving processed data to the archiver directory.
  * 
  * <pre>
@@ -84,6 +57,7 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
  *                                     Add debug information.
  * Nov 05, 2013 2499       rjpeter     Repackaged, removed config files, always compresses hdf5.
  * Nov 11, 2013 2478       rjpeter     Updated data store copy to always copy hdf5.
+ * Dec 13, 2013 2555       rjpeter     Refactored logic into DatabaseArchiveProcessor.
  * </pre>
  * 
  * @author rjpeter
@@ -100,29 +74,34 @@ public class DatabaseArchiver implements IPluginArchiver {
         protected SimpleDateFormat initialValue() {
             SimpleDateFormat df = new SimpleDateFormat(
                     "yyyy-MM-dd HH:mm:ss.SSS");
-            df.setTimeZone(TimeZone.getTimeZone("GMT"));
+            df.setTimeZone(TimeUtil.GMT_TIME_ZONE);
             return df;
         }
     };
 
     /** Minimum time increment to archive, note based off of insertTime. */
-    private static final int MIN_DURATION_MILLIS = 1000 * 60 * 30;
+    private static final long MIN_DURATION_MILLIS = 30 * TimeUtil.MILLIS_PER_MINUTE;
 
     /** Maximum time increment to archive, note based off of insertTime. */
-    private static final int MAX_DURATION_MILLIS = 1000 * 60 * 60;
+    private static final long MAX_DURATION_MILLIS = 120 * TimeUtil.MILLIS_PER_MINUTE;
+
+    /** Default batch size for database queries */
+    private static final Integer defaultBatchSize = 10000;
 
     /** Job's name. */
     private static final String TASK_NAME = "DB Archiver";
 
     /** Cluster time out on lock. */
-    private static final int CLUSTER_LOCK_TIMEOUT = 60000;
-
-    /** Chunk size for I/O Buffering and Compression */
-    private static final int CHUNK_SIZE = 8192;
+    private static final long CLUSTER_LOCK_TIMEOUT = 10 * TimeUtil.MILLIS_PER_MINUTE;
 
     /** Mapping for plug-in formatters. */
     private final Map<String, IPluginArchiveFileNameFormatter> pluginArchiveFormatters;
 
+    /** Mapping for plug-in fetch size */
+    private final Map<String, Integer> pluginBatchSize;
+
+    private final IPluginArchiveFileNameFormatter defaultFormatter = new DefaultPluginArchiveFileNameFormatter();
+
     /** When true dump the pdos. */
     private final boolean debugArchiver;
 
@@ -133,8 +112,7 @@ public class DatabaseArchiver implements IPluginArchiver {
      */
     public DatabaseArchiver() {
         pluginArchiveFormatters = new HashMap<String, IPluginArchiveFileNameFormatter>();
-        pluginArchiveFormatters.put("default",
-                new DefaultPluginArchiveFileNameFormatter());
+        pluginBatchSize = new HashMap<String, Integer>();
         debugArchiver = Boolean.getBoolean("archive.debug.enable");
         compressDatabaseFiles = Boolean
                 .getBoolean("archive.compression.enable");
@@ -159,12 +137,10 @@ public class DatabaseArchiver implements IPluginArchiver {
         }
     }
 
-    @SuppressWarnings("rawtypes")
     public boolean archivePluginData(String pluginName, String archivePath) {
         SimpleDateFormat dateFormat = TL_DATE_FORMAT.get();
         // set archive time
-        Calendar runTime = Calendar.getInstance();
-        runTime.setTimeZone(TimeZone.getTimeZone("GMT"));
+        Calendar runTime = TimeUtil.newGmtCalendar();
         runTime.add(Calendar.MINUTE, -30);
 
         // cluster lock, grabbing time of last successful archive
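The refactored loop in the next hunk walks the insert-time range in bounded windows, at least MIN_DURATION_MILLIS and at most MAX_DURATION_MILLIS per pass. A standalone sketch of just that windowing arithmetic, with the two constants copied from above; it only mirrors the intent, since the real determineStartTime/determineEndTime also consult the database:

    public class WindowDemo {
        private static final long MILLIS_PER_MINUTE = 60 * 1000L;
        private static final long MIN_DURATION_MILLIS = 30 * MILLIS_PER_MINUTE;
        private static final long MAX_DURATION_MILLIS = 120 * MILLIS_PER_MINUTE;

        public static void main(String[] args) {
            long start = 0L;                        // last checkpoint
            long runTime = 5 * MAX_DURATION_MILLIS; // the "now" cutoff

            // Advance in MAX-sized windows; stop once less than MIN remains.
            while ((runTime - start) >= MIN_DURATION_MILLIS) {
                long end = Math.min(start + MAX_DURATION_MILLIS, runTime);
                System.out.println("archive window [" + start + ", " + end + ")");
                start = end;
            }
        }
    }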
@@ -195,99 +171,52 @@ public class DatabaseArchiver implements IPluginArchiver {
             return false;
         }
 
-        Set<String> datastoreFilesToArchive = new HashSet<String>();
-
         startTime = determineStartTime(pluginName, ct.getExtraInfo(), runTime,
                 dao);
         Calendar endTime = determineEndTime(startTime, runTime);
-        Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();
 
         IPluginArchiveFileNameFormatter archiveFormatter = pluginArchiveFormatters
                 .get(pluginName);
 
         if (archiveFormatter == null) {
-            archiveFormatter = pluginArchiveFormatters.get("default");
+            archiveFormatter = defaultFormatter;
         }
 
-        while ((startTime != null) && (endTime != null)) {
-            Map<String, List<PersistableDataObject>> pdosToSave = archiveFormatter
-                    .getPdosByFile(pluginName, dao, pdoMap, startTime,
-                            endTime);
+        Integer batchSize = pluginBatchSize.get(pluginName);
 
-            if ((pdosToSave != null) && !pdosToSave.isEmpty()) {
-                recordCount += savePdoMap(pluginName, archivePath,
-                        pdosToSave);
-                for (Map.Entry<String, List<PersistableDataObject>> entry : pdosToSave
-                        .entrySet()) {
-                    List<PersistableDataObject> pdoList = entry.getValue();
-                    if ((pdoList != null) && !pdoList.isEmpty()
-                            && (pdoList.get(0) instanceof IPersistable)) {
-                        datastoreFilesToArchive.add(entry.getKey());
-                    }
-                }
-            }
-
-            startTime = endTime;
-            endTime = determineEndTime(startTime, runTime);
+        if (batchSize == null) {
+            batchSize = defaultBatchSize;
         }
 
-        if ((pdoMap != null) && !pdoMap.isEmpty()) {
-            recordCount += savePdoMap(pluginName, archivePath, pdoMap);
-            // don't forget to archive the HDF5 for the records that weren't
-            // saved off by the prior while block
-            for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
-                    .entrySet()) {
-                List<PersistableDataObject> pdoList = entry.getValue();
-                if ((pdoList != null) && !pdoList.isEmpty()
-                        && (pdoList.get(0) instanceof IPersistable)) {
-                    datastoreFilesToArchive.add(entry.getKey());
-                }
+        DatabaseArchiveProcessor processor = new DatabaseArchiveProcessor(
+                archivePath, pluginName, dao, archiveFormatter);
+        processor.setCompressDatabaseFiles(compressDatabaseFiles);
+        processor.setDebugArchiver(debugArchiver);
+        processor.setBatchSize(batchSize.intValue());
+
+        while ((startTime != null) && (endTime != null)
+                && !processor.isFailed()) {
+            statusHandler.info(pluginName + ": Checking for records from "
+                    + TimeUtil.formatDate(startTime) + " to "
+                    + TimeUtil.formatDate(endTime));
+
+            processor.reset();
+            dao.processArchiveRecords(startTime, endTime, processor);
+            if (!processor.isFailed()) {
+                recordCount += processor.getRecordsSaved();
+                startTime = endTime;
+                endTime = determineEndTime(startTime, runTime);
+
+                // update the cluster lock with check point details
+                String extraInfo = dateFormat.format(startTime.getTime());
+                lockHandler.setExtraInfo(extraInfo);
+                ClusterLockUtils.updateExtraInfoAndLockTime(TASK_NAME,
+                        pluginName, extraInfo, System.currentTimeMillis());
             }
         }
 
-        if (!datastoreFilesToArchive.isEmpty()) {
-            Compression compRequired = Compression.LZF;
-
-            PluginProperties props = PluginRegistry.getInstance()
-                    .getRegisteredObject(pluginName);
-
-            if ((props != null) && (props.getCompression() != null)) {
-                if (compRequired.equals(Compression.valueOf(props
-                        .getCompression()))) {
-                    // if plugin is already compressed to the correct level,
-                    // no additional compression required
-                    compRequired = null;
-                }
-            }
-
-            for (String dataStoreFile : datastoreFilesToArchive) {
-                IDataStore ds = DataStoreFactory.getDataStore(new File(
-                        FileUtil.join(pluginName, dataStoreFile)));
-                int pathSep = dataStoreFile.lastIndexOf(File.separatorChar);
-                String outputDir = (pathSep > 0 ? FileUtil.join(
-                        archivePath, pluginName,
-                        dataStoreFile.substring(0, pathSep)) : FileUtil
-                        .join(archivePath, pluginName, dataStoreFile));
-
-                try {
-                    // copy the changed hdf5 file, does repack if
-                    // compRequired, otherwise pure file copy
-                    ds.copy(outputDir, compRequired, null, 0, 0);
-                } catch (StorageException e) {
-                    statusHandler.handle(Priority.PROBLEM,
-                            e.getLocalizedMessage());
-                }
-            }
-        }
-
-        // set last archive time to startTime
-        if (startTime != null) {
-            lockHandler
-                    .setExtraInfo(dateFormat.format(startTime.getTime()));
-        }
-
         if (recordCount > 0) {
             statusHandler.info(pluginName
-                    + ": successfully archived "
+                    + ": archived "
                     + recordCount
                     + " records in "
                     + TimeUtil.prettyDuration(System.currentTimeMillis()
@@ -315,180 +244,6 @@ public class DatabaseArchiver implements IPluginArchiver {
         return true;
     }
 
-    @SuppressWarnings("rawtypes")
-    protected int savePdoMap(String pluginName, String archivePath,
-            Map<String, List<PersistableDataObject>> pdoMap)
-            throws SerializationException, IOException {
-        int recordsSaved = 0;
-
-        StringBuilder path = new StringBuilder();
-        for (Map.Entry<String, List<PersistableDataObject>> entry : pdoMap
-                .entrySet()) {
-            path.setLength(0);
-            path.append(archivePath).append(File.separator).append(pluginName)
-                    .append(File.separator).append(entry.getKey());
-            // remove .h5
-            if (path.lastIndexOf(".h5") == (path.length() - 3)) {
-                path.setLength(path.length() - 3);
-            }
-            int pathDebugLength = path.length();
-            if (compressDatabaseFiles) {
-                path.append(".bin.gz");
-            } else {
-                path.append(".bin");
-            }
-
-            File file = new File(path.toString());
-            List<PersistableDataObject> pdosToSerialize = entry.getValue();
-            recordsSaved += pdosToSerialize.size();
-
-            if (file.exists()) {
-                // read previous list in from disk (in gz format)
-                InputStream is = null;
-
-                try {
-
-                    // created gzip'd stream
-                    if (compressDatabaseFiles) {
-                        is = new GZIPInputStream(new FileInputStream(file),
-                                CHUNK_SIZE);
-                    } else {
-                        is = new BufferedInputStream(new FileInputStream(file),
-                                CHUNK_SIZE);
-                    }
-
-                    // transform back for list append
-                    @SuppressWarnings("unchecked")
-                    List<PersistableDataObject> prev = SerializationUtil
-                            .transformFromThrift(List.class, is);
-
-                    statusHandler.info(pluginName + ": Read in " + prev.size()
-                            + " records from file " + file.getAbsolutePath());
-
-                    List<PersistableDataObject> newList = new ArrayList<PersistableDataObject>(
-                            prev.size() + pdosToSerialize.size());
-
-                    // get set of new identifiers
-                    Set<Object> identifierSet = new HashSet<Object>(
-                            pdosToSerialize.size(), 1);
-                    for (PersistableDataObject pdo : pdosToSerialize) {
-                        identifierSet.add(pdo.getIdentifier());
-                    }
-
-                    // merge records by Identifier, to remove old duplicate
-                    for (PersistableDataObject pdo : prev) {
-                        if (!identifierSet.contains(pdo.getIdentifier())) {
-                            newList.add(pdo);
-                        }
-                    }
-
-                    // release prev
-                    prev = null;
-
-                    newList.addAll(pdosToSerialize);
-                    pdosToSerialize = newList;
-                } finally {
-                    if (is != null) {
-                        try {
-                            is.close();
-                        } catch (IOException e) {
-                            statusHandler.error(pluginName
-                                    + ": Error occurred closing input stream",
-                                    e);
-                        }
-                    }
-                }
-            }
-
-            statusHandler.info(pluginName + ": Serializing "
-                    + pdosToSerialize.size() + " records to file "
-                    + file.getAbsolutePath());
-
-            OutputStream os = null;
-
-            try {
-                if (!file.getParentFile().exists()) {
-                    file.getParentFile().mkdirs();
-                }
-
-                if (debugArchiver) {
-                    String debugRootName = path.substring(0, pathDebugLength);
-                    dumpPdos(pluginName, pdosToSerialize, debugRootName);
-                }
-
-                // created gzip'd stream
-                if (compressDatabaseFiles) {
-                    os = new GZIPOutputStream(new FileOutputStream(file),
-                            CHUNK_SIZE);
-                } else {
-                    os = new BufferedOutputStream(new FileOutputStream(file),
-                            CHUNK_SIZE);
-                }
-
-                // Thrift serialize pdo list
-                SerializationUtil.transformToThriftUsingStream(pdosToSerialize,
-                        os);
-            } finally {
-                if (os != null) {
-                    try {
-                        os.close();
-                    } catch (IOException e) {
-                        statusHandler.error(pluginName
-                                + ": Error occurred closing output stream", e);
-                    }
-                }
-            }
-        }
-
-        return recordsSaved;
-    }
-
-    /**
-     * Dump the record information being archived to a file.
-     */
-    @SuppressWarnings("rawtypes")
-    private void dumpPdos(String pluginName,
-            List<PersistableDataObject> pdosToSerialize, String debugRootName) {
-        StringBuilder sb = new StringBuilder(debugRootName);
-        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
-        sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
-        sb.append("_").append(sdf.format(Calendar.getInstance().getTime()))
-                .append(".txt");
-        File file = new File(sb.toString());
-        Writer writer = null;
-        try {
-            PersistableDataObject[] pdoArray = pdosToSerialize
-                    .toArray(new PersistableDataObject[0]);
-            writer = new BufferedWriter(new FileWriter(file));
-            statusHandler.info(String.format("Dumping %s records to: %s",
-                    pdoArray.length, file.getAbsolutePath()));
-            for (int i = 0; i < pdosToSerialize.size(); ++i) {
-                if (pdoArray[i] instanceof PluginDataObject) {
-                    PluginDataObject pdo = (PluginDataObject) pdoArray[i];
-                    if (pdo.getId() != 0) {
-                        // otherwise was read from file
-                        writer.write("" + pdo.getId() + ":");
-                        writer.write(pdo.getDataURI());
-                        writer.write("\n");
-                    }
-                } else {
-                    writer.write(pdoArray[i].toString());
-                    writer.write("\n");
-                }
-            }
-        } catch (Exception e) {
-            statusHandler.handle(Priority.PROBLEM, e.getLocalizedMessage(), e);
-        } finally {
-            if (writer != null) {
-                try {
-                    writer.close();
-                } catch (Exception e) {
-                    // Ignore
-                }
-                writer = null;
-            }
-        }
-    }
-
     /**
      * Get the plug-in's start time for a query.
      *
@@ -591,4 +346,17 @@ public class DatabaseArchiver implements IPluginArchiver {
 
         return this;
     }
+
+    /**
+     * Register batch size for a plug-in.
+     *
+     * @param pluginName
+     * @param batchSize
+     *            Batch Size for the plugin. Default is 10000.
+     * @return databaseArchiver
+     */
+    public Object registerPluginBatchSize(String pluginName, Integer batchSize) {
+        pluginBatchSize.put(pluginName, batchSize);
+        return this;
+    }
 }
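The lookup that pairs with registerPluginBatchSize is a plain map check with a fallback to the default. A self-contained sketch of that behavior; the "radar" plugin name and the 2500 value are illustrative only, and in the real system registration would happen through the Spring-managed DatabaseArchiver bean rather than a hand-built map:

    import java.util.HashMap;
    import java.util.Map;

    public class BatchSizeLookupDemo {
        private static final Integer DEFAULT_BATCH_SIZE = 10000;
        private static final Map<String, Integer> PLUGIN_BATCH_SIZE = new HashMap<String, Integer>();

        public static void main(String[] args) {
            // Explicit registration wins; everything else uses the default.
            PLUGIN_BATCH_SIZE.put("radar", 2500); // hypothetical values
            System.out.println(sizeFor("radar")); // 2500
            System.out.println(sizeFor("obs"));   // 10000
        }

        private static int sizeFor(String plugin) {
            Integer batchSize = PLUGIN_BATCH_SIZE.get(plugin);
            return (batchSize == null) ? DEFAULT_BATCH_SIZE : batchSize;
        }
    }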
+ * Nov 05, 2013 2499 rjpeter Repackaged + * Dec 14, 2013 2555 rjpeter Refactored * * * @author dgilling @@ -60,89 +52,40 @@ import com.raytheon.uf.edex.database.plugin.PluginDao; public class DefaultPluginArchiveFileNameFormatter implements IPluginArchiveFileNameFormatter { - /* * (non-Javadoc) * * @see - * com.raytheon.uf.edex.maintenance.archive.IPluginArchiveFileNameFormatter - * #getPdosByFile(java.lang.String, - * com.raytheon.uf.edex.database.plugin.PluginDao, java.util.Map, - * java.util.Calendar, java.util.Calendar) + * com.raytheon.uf.edex.archive.IPluginArchiveFileNameFormatter#getFilename + * (java.lang.String, com.raytheon.uf.edex.database.plugin.PluginDao, + * com.raytheon.uf.common.dataplugin.persist.PersistableDataObject) */ - @SuppressWarnings("rawtypes") @Override - public Map> getPdosByFile( - String pluginName, PluginDao dao, - Map> pdoMap, - Calendar startTime, Calendar endTime) - throws DataAccessLayerException { - List pdos = dao.getRecordsToArchive(startTime, - endTime); - - Set newFileEntries = new HashSet(); - if ((pdos != null) && !pdos.isEmpty()) { - if (pdos.get(0) instanceof IPersistable) { - IHDFFilePathProvider pathProvider = dao.pathProvider; - - for (PersistableDataObject pdo : pdos) { - IPersistable persistable = (IPersistable) pdo; - String path = pathProvider.getHDFPath(pluginName, - persistable) - + File.separator - + pathProvider.getHDFFileName(pluginName, - persistable); - newFileEntries.add(path); - List list = pdoMap.get(path); - if (list == null) { - list = new LinkedList(); - pdoMap.put(path, list); - } - list.add(pdo); - } + public String getFilename(String pluginName, PluginDao dao, + PersistableDataObject pdo) { + String path = null; + if (pdo instanceof IPersistable) { + IPersistable persistable = (IPersistable) pdo; + IHDFFilePathProvider pathProvider = dao.pathProvider; + path = pathProvider.getHDFPath(pluginName, persistable) + + File.separator + + pathProvider.getHDFFileName(pluginName, persistable); + } else { + String timeString = null; + PluginDataObject pluginDataObj = (PluginDataObject) pdo; + if (pdo instanceof PluginDataObject) { + Date time = pluginDataObj.getDataTime().getRefTimeAsCalendar() + .getTime(); + timeString = DefaultPathProvider.fileNameFormat.get().format( + time); } else { - // order files by refTime hours - for (PersistableDataObject pdo : pdos) { - String timeString = null; - if (pdo instanceof PluginDataObject) { - PluginDataObject pluginDataObj = (PluginDataObject) pdo; - Date time = pluginDataObj.getDataTime() - .getRefTimeAsCalendar().getTime(); - timeString = DefaultPathProvider.fileNameFormat.get() - .format(time); - } else { - // no refTime to use bounded insert query bounds - Date time = startTime.getTime(); - timeString = DefaultPathProvider.fileNameFormat.get() - .format(time); - } - - String path = pluginName + timeString; - newFileEntries.add(path); - List list = pdoMap.get(path); - if (list == null) { - list = new LinkedList(); - pdoMap.put(path, list); - } - list.add(pdo); - } - + // no refTime, use current time as last resort + timeString = DefaultPathProvider.fileNameFormat.get().format( + new Date()); } + + path = pluginName + timeString; } - - Iterator iter = pdoMap.keySet().iterator(); - Map> pdosToSave = new HashMap>( - pdoMap.size() - newFileEntries.size()); - - while (iter.hasNext()) { - String key = iter.next(); - if (!newFileEntries.contains(key)) { - pdosToSave.put(key, pdoMap.get(key)); - iter.remove(); - } - } - - return pdosToSave; + return path; } - } diff --git 
diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java
index b0e106df98..5121fe3b0e 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java
+++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/IPluginArchiveFileNameFormatter.java
@@ -19,12 +19,7 @@
  **/
 package com.raytheon.uf.edex.archive;
 
-import java.util.Calendar;
-import java.util.List;
-import java.util.Map;
-
 import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
-import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 
 /**
@@ -36,8 +31,9 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * 
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
- * Apr 20, 2012            dgilling    Initial creation
+ * Apr 20, 2012            dgilling    Initial creation
 * Nov 05, 2013 2499       rjpeter     Repackaged
+ * Dec 13, 2013 2555       rjpeter     Refactored
 * 
 * 
 * @author dgilling
@@ -45,29 +41,18 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 */
 public interface IPluginArchiveFileNameFormatter {
-
     /**
+     * Returns the base file name for the pdo. In the case of IPersistable
+     * objects, it should match the h5 file.
      * 
      * @param pluginName
+     *            The plugin name.
      * @param dao
-     * @param pdoMap
-     *            The current pdos by file. This map will be merged with pdos,
-     *            if a key was not referenced by pdos it will be removed and
-     *            returned in the returned map for storage.
-     * @param startTime
-     * @param endTime
-     * @return The pdos to save to disk. If sortPdosByFiles did not store any
-     *         entries from pdos into a file listed in currentPdoMap then that
-     *         entry will be returned in a new map and removed from
-     *         currentPdoMap.
-     * @throws DataAccessLayerException
-     *             If the DAO is unable to retrieve the records from the
-     *             database.
+     *            The dao for the object.
+     * @param pdo
+     *            The object to look up.
+     * @return The base file name.
      */
-    @SuppressWarnings("rawtypes")
-    public abstract Map<String, List<PersistableDataObject>> getPdosByFile(
-            String pluginName, PluginDao dao,
-            Map<String, List<PersistableDataObject>> pdoMap,
-            Calendar startTime, Calendar endTime)
-            throws DataAccessLayerException;
+    public String getFilename(String pluginName, PluginDao dao,
+            PersistableDataObject<?> pdo);
 }
diff --git a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java
index 6af9a95018..fdfc1133ec 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java
+++ b/edexOsgi/com.raytheon.uf.edex.archive/src/com/raytheon/uf/edex/archive/purge/ArchivePurger.java
@@ -44,6 +44,7 @@ import com.raytheon.uf.common.time.util.TimeUtil;
 *                                     number of files purged.
 * Sep 03, 2013 2224       rferrel     Add check to enable/disable purger.
 * Nov 05, 2013 2499       rjpeter     Repackaged
+ * Dec 17, 2013 2603       rjpeter     Reload configuration every run of purge.
 * 
 * 
 * @author bgonzale
@@ -67,6 +68,7 @@
         timer.start();
         statusHandler.info("Archive Purge started.");
         ArchiveConfigManager manager = ArchiveConfigManager.getInstance();
+        manager.reset();
         Collection<ArchiveConfig> archives = manager.getArchives();
         for (ArchiveConfig archive : archives) {
             ITimer archiveTimer = TimeUtil.getTimer();
diff --git a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml
index 8cc6e745f7..bb2d928ab0 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml
+++ b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/PROCESSED_DATA.xml
@@ -28,6 +28,8 @@
 * Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
 * Nov 05, 2013 2497       rferrel     Change root directory.
 * Nov 13, 2013 2549       rferrel     Changes to GFE and modelsounding.
+ * Dec 12, 2013 2624       rferrel     Document Julian time stamp.
+ * Dec 13, 2013 2555       rjpeter     Updated all to use dirPatterns.
 *
 * @author rferrel
 * @version 1.0
@@ -66,14 +68,19 @@
                            single table entry.
  <timeType>              - Optional tag to determine what type of time stamp is being used to get
                            files/directories for retention and case creation. The value dictates how many
                            groupings in the <dirPattern>s and/or <filePattern> are
-                           used to get the time stamp for a file. The four values are:
-                           Date - (default) the time stamp is made up of four groups in the patterns: year, month, day and hour.
+                           used to get the time stamp for a file. The five values are:
+                           Date - (default) the time stamp is made up of 3 or 4 groups in the patterns: year, month, day and optional hour.
+                           Julian - The time stamp is made up of 2 or 3 groups in the patterns: year, day_of_year and optional hour.
+                                    If the year is less than 100 it is adjusted to a year prior to, or no more than a month into
+                                    the future of, the current simulated year.
                            EpochSec - The time stamp has one group in the patterns which is the epoch time in seconds.
                            EpochMS - The time stamp has one group in the patterns which is the epoch time in milliseconds.
                            File - No group is used to get the time stamp. Instead use the file's date of last modification.
  <dateGroupIndices>      - Required tag when <timeType> has any value but File.
                            Date - A comma separated list of 3 or 4 numbers which are in order the index for year, month, day and hour.
                                   When only 3 numbers are given, the hour value is 23.
+                           Julian - A comma separated list of 2 or 3 numbers which are in order the index for year, day of year, and hour.
+                                    When only two numbers are given, the hour value is 23.
                            EpochSec - A number which is the index for the epoch in seconds.
                            EpochMS - A number which is the index for the epoch in milliseconds.
                            File - Not needed since no group is used to get the time stamp.
@@ -116,7 +123,7 @@
         <displayLabel>{1}</displayLabel>
         <timeType>Date</timeType>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
-        <filePattern>[^/]*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
+        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
   The first <dataSet> looks for files matching the <filePattern> in the directories acars, airep, airmet or taf.
@@ -137,8 +144,7 @@
       <name>Decision Assistance</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>(cwat|fog|ffmp|fssobs|preciprate|qpf|scan|vil)</dirPattern>
-        <filePattern>.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</filePattern>
+        <dirPattern>(cwat|fog|ffmp|fssobs|preciprate|qpf|scan|vil)/.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</dirPattern>
         <displayLabel>{1}</displayLabel>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
@@ -147,13 +153,12 @@
       <name>GFE</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>gfe/(.*)/(.*)/(\d{4})_(\d{2})_(\d{2})_(\d{2})\d{2}</dirPattern>
+        <dirPattern>gfe/(.*)/(.*)/(\d{4})_(\d{2})_(\d{2})_(\d{2})\d{2}.*</dirPattern>
         <displayLabel>{1} - {2}</displayLabel>
         <dateGroupIndices>3,4,5,6</dateGroupIndices>
-        <dirPattern>gfe/(.*)/(.*)</dirPattern>
-        <filePattern>.*_(\d{4})(\d{2})(\d{2})_.*</filePattern>
+        <dirPattern>gfe/(.*)/(.*)/.*_(\d{4})(\d{2})(\d{2})_.*</dirPattern>
         <displayLabel>{1} - {2}</displayLabel>
         <dateGroupIndices>3,4,5</dateGroupIndices>
@@ -162,8 +167,7 @@
       <name>Local</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>(ldadhydro|ldadmesonet|ldadprofiler|ldad_manual|mesowest|qc)</dirPattern>
-        <filePattern>.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</filePattern>
+        <dirPattern>(ldadhydro|ldadmesonet|ldadprofiler|ldad_manual|mesowest|qc)/.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</dirPattern>
         <displayLabel>{1}</displayLabel>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
@@ -172,54 +176,48 @@
       <name>Model</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>(grid)/(.*)/(.*)</dirPattern>
+        <dirPattern>(grid)/(.*)/(.*)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})-.*</dirPattern>
         <displayLabel>{2}</displayLabel>
         <dateGroupIndices>4,5,6,7</dateGroupIndices>
-        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})-.*</filePattern>
-        <dirPattern>(modelsounding)/(.*)/.*</dirPattern>
-        <dirPattern>(bufrmos)(.*)</dirPattern>
+        <dirPattern>(modelsounding)/(.*)/.*/.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</dirPattern>
+        <dirPattern>(bufrmos)(.*)/.*(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <displayLabel>{1} - {2}</displayLabel>
         <dateGroupIndices>3,4,5,6</dateGroupIndices>
-        <filePattern>.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</filePattern>
       <name>Products</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>(airmet|atcf|aww|bufrncwf|ccfp|convsigmet|cwa|ffg|intlsigmet|nonconvsigmet|stormtrack|taf|tcg|tcm|tcs|text|vaa|warning|wcp)</dirPattern>
-        <dirPattern>(bufrsigwx|redbook)/.*</dirPattern>
+        <dirPattern>(airmet|atcf|aww|bufrncwf|ccfp|convsigmet|cwa|ffg|intlsigmet|nonconvsigmet|stormtrack|taf|tcg|tcm|tcs|text|vaa|warning|wcp)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
+        <dirPattern>(bufrsigwx|redbook)/.*/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <displayLabel>{1}</displayLabel>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
-        <filePattern>[^/]*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
       <name>Observation</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>(acars|airep|binlightning|bufrascat|bufrhdw|bufrmthdw|bufrssmi|idft|lsr|obs|pirep|recco|svrwx)</dirPattern>
-        <dirPattern>(sfcobs)/.*</dirPattern>
+        <dirPattern>(acars|airep|binlightning|bufrascat|bufrhdw|bufrmthdw|bufrssmi|idft|lsr|obs|pirep|recco|svrwx)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
+        <dirPattern>(sfcobs)/.*/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <displayLabel>{1}</displayLabel>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
-        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
       <name>Satellite</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>satellite/(.*)/(.*)</dirPattern>
-        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</filePattern>
+        <dirPattern>satellite/(.*)/(.*)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <dateGroupIndices>3,4,5,6</dateGroupIndices>
         <displayLabel>{1}</displayLabel>
-        <dirPattern>(mcidas|viirs)/.*/.*/.*/.*</dirPattern>
-        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</filePattern>
+        <dirPattern>(mcidas|viirs)/.*/.*/.*/.*/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
         <displayLabel>{1}</displayLabel>
@@ -228,10 +226,9 @@
       <name>Profiles</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>(acarssounding|bufrua|goessounding|poessounding|profiler)</dirPattern>
+        <dirPattern>(acarssounding|bufrua|goessounding|poessounding|profiler)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <displayLabel>{1}</displayLabel>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
-        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
       <name>radar</name>
       <selectedRetentionHours>168</selectedRetentionHours>
-        <dirPattern>radar/(.*)/(.*)</dirPattern>
+        <dirPattern>radar/(.*)/(.*)/.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})</dirPattern>
         <displayLabel>{1}</displayLabel>
         <dateGroupIndices>3,4,5,6</dateGroupIndices>
-        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
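The Julian timeType documented above maps a (possibly two-digit) year plus day-of-year to a calendar date, pinning two-digit years near the current simulated year. A standalone Java sketch of that rule as the comment describes it; the one-month look-ahead window is taken from the comment text, and the 13/347 input values are illustrative:

    import java.util.Calendar;
    import java.util.TimeZone;

    public class JulianStampDemo {
        public static void main(String[] args) {
            // e.g. groups extracted by a Julian dirPattern: year 13, day 347
            int year = 13;
            int dayOfYear = 347;

            Calendar now = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            if (year < 100) {
                // Anchor the two-digit year to the current century, then back
                // up a century if that lands more than a month in the future.
                int century = (now.get(Calendar.YEAR) / 100) * 100;
                year += century;
                Calendar candidate = (Calendar) now.clone();
                candidate.set(Calendar.YEAR, year);
                candidate.set(Calendar.DAY_OF_YEAR, dayOfYear);
                Calendar limit = (Calendar) now.clone();
                limit.add(Calendar.MONTH, 1);
                if (candidate.after(limit)) {
                    year -= 100;
                }
            }

            Calendar stamp = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            stamp.clear();
            stamp.set(Calendar.YEAR, year);
            stamp.set(Calendar.DAY_OF_YEAR, dayOfYear);
            // under this rule, 13/347 resolves to 2013-12-13
            System.out.println(stamp.getTime());
        }
    }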
diff --git a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml
index c9aff14e97..c88591683c 100644
--- a/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml
+++ b/edexOsgi/com.raytheon.uf.edex.archive/utility/common_static/base/archiver/purger/RAW_DATA.xml
@@ -26,6 +26,7 @@
 * Jun 20, 2013 1966       rferrel     Initial creation
 * Aug 05, 2013 2224       rferrel     Changes to add dataSet tags.
 * Oct 01, 2013 2147       rferrel     Date time stamp no longer requires an hour field.
+ * Dec 12, 2013 2624       rferrel     Document Julian time stamp.
 *
 * @author rferrel
 * @version 1.0
@@ -64,14 +65,19 @@
                            single table entry.
  <timeType>              - Optional tag to determine what type of time stamp is being used to get
                            files/directories for retention and case creation. The value dictates how many
                            groupings in the <dirPattern>s and/or <filePattern> are
-                           used to get the time stamp for a file. The four values are:
-                           Date - (default) the time stamp is made up of four groups in the patterns: year, month, day and hour.
+                           used to get the time stamp for a file. The five values are:
+                           Date - (default) the time stamp is made up of 3 or 4 groups in the patterns: year, month, day and optional hour.
+                           Julian - The time stamp is made up of 2 or 3 groups in the patterns: year, day_of_year and optional hour.
+                                    If the year is less than 100 it is adjusted to a year prior to, or no more than a month into
+                                    the future of, the current simulated year.
                            EpochSec - The time stamp has one group in the patterns which is the epoch time in seconds.
                            EpochMS - The time stamp has one group in the patterns which is the epoch time in milliseconds.
                            File - No group is used to get the time stamp. Instead use the file's date of last modification.
  <dateGroupIndices>      - Required tag when <timeType> has any value but File.
                            Date - A comma separated list of 3 or 4 numbers which are in order the index for year, month, day and hour.
                                   When only 3 numbers are given, the hour value is 23.
+                           Julian - A comma separated list of 2 or 3 numbers which are in order the index for year, day of year, and hour.
+                                    When only two numbers are given, the hour value is 23.
                            EpochSec - A number which is the index for the epoch in seconds.
                            EpochMS - A number which is the index for the epoch in milliseconds.
                            File - Not needed since no group is used to get the time stamp.
@@ -114,7 +120,7 @@
         <displayLabel>{1}</displayLabel>
         <timeType>Date</timeType>
         <dateGroupIndices>2,3,4,5</dateGroupIndices>
-        <filePattern>[^/]*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
+        <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})\..*</filePattern>
   The first <dataSet> looks for files matching the <filePattern> in the directories acars, airep, airmet or taf.
diff --git a/edexOsgi/com.raytheon.uf.edex.database/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.database/META-INF/MANIFEST.MF
index a68d121ade..a32b47bf5f 100644
--- a/edexOsgi/com.raytheon.uf.edex.database/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.edex.database/META-INF/MANIFEST.MF
@@ -28,6 +28,7 @@ Export-Package: com.raytheon.uf.edex.database,
  com.raytheon.uf.edex.database.handlers,
  com.raytheon.uf.edex.database.init,
  com.raytheon.uf.edex.database.plugin,
+ com.raytheon.uf.edex.database.processor,
  com.raytheon.uf.edex.database.purge,
  com.raytheon.uf.edex.database.query,
  com.raytheon.uf.edex.database.status,
diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/cluster/ClusterLockUtils.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/cluster/ClusterLockUtils.java
index 55b6439c83..375f1546c5 100644
--- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/cluster/ClusterLockUtils.java
+++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/cluster/ClusterLockUtils.java
@@ -50,7 +50,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
 * Apr 28, 2010 #5050      rjpeter     Initial creation from SmartInitTransaction.
 * Aug 26, 2013 #2272      bkowal      Add a function to see if a cluster suffix has
 *                                     been specified via the environment.
- * 
+ * Dec 13, 2013 2555       rjpeter     Added updateExtraInfoAndLockTime and javadoc.
 * 
 * 
 * @author rjpeter
@@ -116,6 +116,13 @@ public class ClusterLockUtils {
     }
 
     /**
+     * Attempts to lock based on the taskName/details and the specified
+     * validTime for checkTime. If waitForRunningToFinish it will sleep and
+     * then attempt to lock again until it achieves a lock other than already
+     * running.
+     * The waitForRunningToFinish is not part of the main lock logic due to
+     * checkTime being keyed off something other than System clock. If the
+     * validTime is older than the current validTime for the lock, an OLD
+     * LockState will be returned.
      * 
      * @param taskName
      * @param details
@@ -131,6 +138,11 @@ public class ClusterLockUtils {
     }
 
     /**
+     * Attempts to lock based on the taskName/details and the specified
+     * lockHandler. If waitForRunningToFinish it will sleep and then attempt to
+     * lock again until it achieves a lock other than already running. The
+     * waitForRunningToFinish is not part of the main lock logic due to
+     * checkTime being keyed off something other than System clock.
      * 
      * @param taskName
      * @param details
@@ -214,6 +226,9 @@ public class ClusterLockUtils {
     }
 
     /**
+     * Updates the lock time for the specified lock. IMPORTANT: No tracking is
+     * done to ensure caller has lock, so only use when you know you have a
+     * valid lock.
      * 
      * @param taskName
     * @param details
@@ -268,7 +283,9 @@ public class ClusterLockUtils {
     }
 
     /**
-     * Updates the extra info field for a cluster task
+     * Updates the extra info field for a cluster task. IMPORTANT: No tracking
+     * is done to ensure caller has lock, so only use when you know you have a
+     * valid lock.
      * 
      * @param taskName
      *            The name of the task
@@ -327,6 +344,70 @@ public class ClusterLockUtils {
     }
 
     /**
+     * Updates the extra info and lock time fields for a cluster task.
+     * IMPORTANT: No tracking is done to ensure caller has lock, so only use
+     * when you know you have a valid lock.
+     * 
+     * @param taskName
+     *            The name of the task
+     * @param details
+     *            The details associated with the task
+     * @param extraInfo
+     *            The new extra info to set
+     * @param lockTime
+     *            The lock time to set
+     * @return True if the update was successful, else false if the update
+     *         failed
+     */
+    public static boolean updateExtraInfoAndLockTime(String taskName,
+            String details, String extraInfo, long lockTime) {
+        CoreDao cd = new CoreDao(DaoConfig.DEFAULT);
+        Session s = null;
+        Transaction tx = null;
+        ClusterTask ct = null;
+        boolean rval = true;
+
+        try {
+            s = cd.getHibernateTemplate().getSessionFactory().openSession();
+            tx = s.beginTransaction();
+            ClusterTaskPK pk = new ClusterTaskPK();
+            pk.setName(taskName);
+            pk.setDetails(details);
+
+            ct = getLock(s, pk, true);
+            ct.setExtraInfo(extraInfo);
+            ct.setLastExecution(lockTime);
+            s.update(ct);
+            tx.commit();
+        } catch (Throwable t) {
+            handler.handle(Priority.ERROR,
+                    "Error processing update lock time for cluster task ["
+                            + taskName + "/" + details + "]", t);
+            rval = false;
+
+            if (tx != null) {
+                try {
+                    tx.rollback();
+                } catch (HibernateException e) {
+                    handler.handle(Priority.ERROR,
+                            "Error rolling back cluster task lock transaction",
+                            e);
+                }
+            }
+        } finally {
+            if (s != null) {
+                try {
+                    s.close();
+                } catch (HibernateException e) {
+                    handler.handle(Priority.ERROR,
+                            "Error closing cluster task lock session", e);
+                }
+            }
+        }
+        return rval;
+    }
+
+    /**
+     * Looks up the specified cluster lock.
      * 
      * @param taskName
      * @param details
@@ -388,6 +469,9 @@ public class ClusterLockUtils {
     }
 
     /**
+     * Unlocks the given cluster lock. If clear time is set, time field will be
+     * reset to the epoch time. This can be useful when wanting the next check
+     * to always succeed.
      * 
      * @param taskName
      * @param details
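To make the checkpointing pattern concrete: the archiver takes the cluster lock once, then calls updateExtraInfoAndLockTime after each successful window so a crashed run resumes from its last checkpoint rather than the beginning. A self-contained analogue of that flow, with an in-memory map standing in for the cluster_task table (all names here are hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    public class CheckpointDemo {
        // In-memory stand-in for the cluster_task extraInfo column.
        private static final Map<String, String> EXTRA_INFO = new HashMap<String, String>();

        public static void main(String[] args) {
            String task = "DB Archiver";
            String plugin = "obs";

            // Resume from the previous checkpoint, if any.
            String checkpoint = EXTRA_INFO.get(task + "/" + plugin);
            int start = (checkpoint == null) ? 0 : Integer.parseInt(checkpoint);

            // Process fixed-size windows, checkpointing after each one so a
            // failure resumes from the last completed window, mirroring
            // updateExtraInfoAndLockTime above.
            for (int window = start; window < start + 3; window++) {
                // ... archive window here ...
                EXTRA_INFO.put(task + "/" + plugin, Integer.toString(window + 1));
            }
            System.out.println("next run resumes at window "
                    + EXTRA_INFO.get(task + "/" + plugin));
        }
    }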
@@ -500,6 +584,7 @@ public class ClusterLockUtils {
     }
 
     /**
+     * Deletes the specified cluster lock.
      * 
      * @param taskName
      * @param details
@@ -554,11 +639,22 @@ public class ClusterLockUtils {
         return rval;
     }
 
+    /**
+     * Looks up and returns the specified cluster lock. If the lock does not
+     * exist and create flag is set, the lock will be created. This is done
+     * using a Master lock to ensure isolation among all transactions.
+     * 
+     * @param s
+     * @param pk
+     * @param create
+     * @return
+     * @throws HibernateException
+     */
     private static ClusterTask getLock(Session s, ClusterTaskPK pk,
             boolean create) throws HibernateException {
         ClusterTask ct = (ClusterTask) s.get(ClusterTask.class, pk,
                 LockOptions.UPGRADE);
-        if (ct == null && create) {
+        if ((ct == null) && create) {
             getMasterLock(s);
 
             // now have master lock, verify new row hasn't already been
@@ -577,6 +673,13 @@ public class ClusterLockUtils {
         return ct;
     }
 
+    /**
+     * Returns the master lock.
+     * 
+     * @param s
+     * @return
+     * @throws HibernateException
+     */
     private static ClusterTask getMasterLock(Session s)
             throws HibernateException {
         ClusterTaskPK masterNewRowLockId = new ClusterTaskPK();
@@ -597,6 +700,12 @@ public class ClusterLockUtils {
         return masterLock;
     }
 
+    /**
+     * Returns all cluster locks that match the specified name.
+     * 
+     * @param name
+     * @return
+     */
     @SuppressWarnings("unchecked")
     public static List<ClusterTask> getLocks(String name) {
         StatelessSession sess = null;
@@ -611,15 +720,15 @@ public class ClusterLockUtils {
             crit.add(nameCrit);
             tasks = crit.list();
         } catch (Throwable e) {
-            // TODO
-            e.printStackTrace();
+            handler.handle(Priority.ERROR,
+                    "Error retrieving cluster locks for name: " + name, e);
         } finally {
             if (sess != null) {
                 try {
                     sess.close();
                 } catch (HibernateException e) {
-                    // TODO
-                    e.printStackTrace();
+                    handler.handle(Priority.ERROR,
+                            "Error closing cluster task getLocks session", e);
                 }
             }
         }
diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java
index e7b6865c95..29cdbb6950 100644
--- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java
+++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/dao/CoreDao.java
@@ -21,7 +21,6 @@
 package com.raytheon.uf.edex.database.dao;
 
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.Serializable;
@@ -68,7 +67,9 @@ import com.raytheon.uf.common.dataquery.db.QueryResult;
 import com.raytheon.uf.common.dataquery.db.QueryResultRow;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
+import com.raytheon.uf.common.util.FileUtil;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
+import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;
 import com.raytheon.uf.edex.database.query.DatabaseQuery;
 
 /**
@@ -94,7 +95,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 * 5/14/08      1076       brockwoo    Fix for distinct with multiple properties
 * Oct 10, 2012 1261       djohnson    Incorporate changes to DaoConfig, add generic to {@link IPersistableDataObject}.
 * Apr 15, 2013 1868       bsteffen    Rewrite mergeAll in PluginDao.
- * 
+ * Dec 13, 2013 2555       rjpeter     Added processByCriteria and fixed Generics warnings.
 * 
 * 
 * @author bphillip
@@ -242,13 +243,13 @@ public class CoreDao extends HibernateDaoSupport {
         return loadAll(daoClass);
     }
 
-    @SuppressWarnings("unchecked")
     public <T> List<T> loadAll(final Class<T> entity) {
-        return (List<T>) txTemplate.execute(new TransactionCallback() {
+        return txTemplate.execute(new TransactionCallback<List<T>>() {
             @Override
-            public Object doInTransaction(TransactionStatus status) {
+            @SuppressWarnings("unchecked")
+            public List<T> doInTransaction(TransactionStatus status) {
                 HibernateTemplate ht = getHibernateTemplate();
-                return ht.loadAll(entity);
+                return (List<T>) ht.loadAll(entity);
             }
         });
     }
@@ -278,10 +279,10 @@ public class CoreDao extends HibernateDaoSupport {
      *         Null if not found
      */
     public PersistableDataObject<?> queryById(final Serializable id) {
-        @SuppressWarnings("unchecked")
-        PersistableDataObject<?> retVal = (PersistableDataObject<?>) txTemplate
-                .execute(new TransactionCallback() {
+        PersistableDataObject<?> retVal = txTemplate
+                .execute(new TransactionCallback<PersistableDataObject<?>>() {
                     @Override
+                    @SuppressWarnings("unchecked")
                     public PersistableDataObject<?> doInTransaction(
                             TransactionStatus status) {
                         return (PersistableDataObject<?>) getHibernateTemplate()
@@ -299,10 +300,10 @@ public class CoreDao extends HibernateDaoSupport {
      * @return The object
      */
     public PersistableDataObject<?> queryById(final PluginDataObject id) {
-        @SuppressWarnings("unchecked")
-        PersistableDataObject<?> retVal = (PersistableDataObject<?>) txTemplate
-                .execute(new TransactionCallback() {
+        PersistableDataObject<?> retVal = txTemplate
+                .execute(new TransactionCallback<PersistableDataObject<?>>() {
                    @Override
+                    @SuppressWarnings("unchecked")
                     public PersistableDataObject<?> doInTransaction(
                             TransactionStatus status) {
                         DetachedCriteria criteria = DetachedCriteria.forClass(
@@ -333,12 +334,12 @@ public class CoreDao extends HibernateDaoSupport {
      *            Maximum number of results to return
      * @return A list of similar objects
      */
-    @SuppressWarnings("unchecked")
     public List<PersistableDataObject<?>> queryByExample(
             final PersistableDataObject<?> obj, final int maxResults) {
-        List<PersistableDataObject<?>> retVal = (List<PersistableDataObject<?>>) txTemplate
-                .execute(new TransactionCallback() {
+        List<PersistableDataObject<?>> retVal = txTemplate
+                .execute(new TransactionCallback<List<PersistableDataObject<?>>>() {
                     @Override
+                    @SuppressWarnings("unchecked")
                     public List<PersistableDataObject<?>> doInTransaction(
                             TransactionStatus status) {
                         return getHibernateTemplate().findByExample(obj, 0,
@@ -377,8 +378,8 @@ public class CoreDao extends HibernateDaoSupport {
         int rowsDeleted = 0;
         try {
             // Get a session and create a new criteria instance
-            rowsDeleted = (Integer) txTemplate
-                    .execute(new TransactionCallback() {
+            rowsDeleted = txTemplate
+                    .execute(new TransactionCallback<Integer>() {
                         @Override
                         public Integer doInTransaction(TransactionStatus status) {
                             String queryString = query.createHQLDelete();
@@ -414,8 +415,8 @@ public class CoreDao extends HibernateDaoSupport {
         List<?> queryResult = null;
         try {
             // Get a session and create a new criteria instance
-            queryResult = (List<?>) txTemplate
-                    .execute(new TransactionCallback() {
+            queryResult = txTemplate
+                    .execute(new TransactionCallback<List<?>>() {
                         @Override
                         public List<?> doInTransaction(TransactionStatus status) {
                             String queryString = query.createHQLQuery();
@@ -444,6 +445,68 @@ public class CoreDao extends HibernateDaoSupport {
         return queryResult;
     }
 
+    /**
+     * Queries the database in batches using a DatabaseQuery object and sends
+     * each batch to the processor.
+ * + * @param query + * The query object + * @param processor + * The processor object + * @return The number of results processed + * @throws DataAccessLayerException + * If the query fails + */ + public int processByCriteria(final DatabaseQuery query, + final IDatabaseProcessor processor) throws DataAccessLayerException { + int rowsProcessed = 0; + try { + // Get a session and create a new HQL query instance + rowsProcessed = txTemplate + .execute(new TransactionCallback<Integer>() { + @Override + public Integer doInTransaction(TransactionStatus status) { + String queryString = query.createHQLQuery(); + Query hibQuery = getSession(false).createQuery( + queryString); + try { + query.populateHQLQuery(hibQuery, + getSessionFactory()); + } catch (DataAccessLayerException e) { + throw new org.hibernate.TransactionException( + "Error populating query", e); + } + + if (processor.getBatchSize() > 0) { + hibQuery.setMaxResults(processor.getBatchSize()); + } else if (query.getMaxResults() != null) { + hibQuery.setMaxResults(query.getMaxResults()); + } + + List results = null; + boolean continueProcessing = false; + int count = 0; + + do { + hibQuery.setFirstResult(count); + results = hibQuery.list(); + continueProcessing = processor.process(results); + count += results.size(); + getSession().clear(); + } while (continueProcessing && (results != null) + && (results.size() > 0)); + processor.finish(); + return count; + } + }); + + } catch (TransactionException e) { + throw new DataAccessLayerException("Transaction failed", e); + } + + return rowsProcessed; + } + public void deleteAll(final List objs) { txTemplate.execute(new TransactionCallbackWithoutResult() { @Override @@ -643,8 +706,8 @@ */ public QueryResult executeHQLQuery(final String hqlQuery) { - QueryResult result = (QueryResult) txTemplate - .execute(new TransactionCallback() { + QueryResult result = txTemplate + .execute(new TransactionCallback() { @Override public QueryResult doInTransaction(TransactionStatus status) { Query hibQuery = getSession(false) .createQuery(hqlQuery); @@ -697,8 +760,8 @@ */ public int executeHQLStatement(final String hqlStmt) { - int queryResult = (Integer) txTemplate - .execute(new TransactionCallback() { + int queryResult = txTemplate + .execute(new TransactionCallback() { @Override public Integer doInTransaction(TransactionStatus status) { Query hibQuery = getSession(false).createQuery(hqlStmt); @@ -722,8 +785,8 @@ public Object[] executeSQLQuery(final String sql) { long start = System.currentTimeMillis(); - List queryResult = (List) txTemplate - .execute(new TransactionCallback() { + List queryResult = txTemplate + .execute(new TransactionCallback>() { @Override public List doInTransaction(TransactionStatus status) { return getSession(false).createSQLQuery(sql).list(); @@ -737,8 +800,8 @@ public List executeCriteriaQuery(final List criterion) { long start = System.currentTimeMillis(); - List queryResult = (List) txTemplate - .execute(new TransactionCallback() { + List queryResult = txTemplate + .execute(new TransactionCallback>() { @Override public List doInTransaction(TransactionStatus status) { @@ -772,8 +835,8 @@ public int executeSQLUpdate(final String sql) { long start = System.currentTimeMillis(); - int updateResult = (Integer) txTemplate - .execute(new TransactionCallback() { + int
updateResult = txTemplate + .execute(new TransactionCallback() { @Override public Integer doInTransaction(TransactionStatus status) { return getSession(false).createSQLQuery(sql) @@ -1006,27 +1069,16 @@ public class CoreDao extends HibernateDaoSupport { * If reading the file fails */ public void runScript(File script) throws DataAccessLayerException { - FileInputStream fileIn; + byte[] bytes = null; try { - fileIn = new FileInputStream(script); + bytes = FileUtil.file2bytes(script); } catch (FileNotFoundException e) { throw new DataAccessLayerException( "Unable to open input stream to sql script: " + script); - } - byte[] bytes = null; - try { - bytes = new byte[fileIn.available()]; - fileIn.read(bytes); } catch (IOException e) { throw new DataAccessLayerException( "Unable to read script contents for script: " + script); } - try { - fileIn.close(); - } catch (IOException e) { - throw new DataAccessLayerException( - "Error closing file input stream to: " + script); - } runScript(new StringBuffer().append(new String(bytes))); } diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java index 6c7d2e2858..8b155100ea 100644 --- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java +++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java @@ -21,7 +21,6 @@ package com.raytheon.uf.edex.database.plugin; import java.io.File; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; @@ -52,7 +51,6 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.dataplugin.annotations.DataURIUtil; -import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider; import com.raytheon.uf.common.dataplugin.persist.IHDFFilePathProvider; import com.raytheon.uf.common.dataplugin.persist.IPersistable; import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject; @@ -74,11 +72,11 @@ import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.common.serialization.SerializationUtil; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.time.util.TimeUtil; -import com.raytheon.uf.common.util.FileUtil; import com.raytheon.uf.edex.core.EdexException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.DaoConfig; +import com.raytheon.uf.edex.database.processor.IDatabaseProcessor; import com.raytheon.uf.edex.database.purge.PurgeLogger; import com.raytheon.uf.edex.database.purge.PurgeRule; import com.raytheon.uf.edex.database.purge.PurgeRuleSet; @@ -114,6 +112,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery; * May 16, 2013 1869 bsteffen Rewrite dataURI property mappings. * Aug 30, 2013 2298 rjpeter Make getPluginName abstract * Oct 07, 2013 2392 rjpeter Updated to pass null productKeys as actual null instead of string null. + * Dec 13, 2013 2555 rjpeter Refactored archiving logic into processArchiveRecords. 
* * * @author bphillip @@ -457,7 +456,7 @@ public abstract class PluginDao extends CoreDao { for (IPersistable persistable : persistables) { try { - if (((PersistableDataObject) persistable) + if (((PersistableDataObject) persistable) .isOverwriteAllowed()) { if (replaceDataStore == null) { replaceDataStore = DataStoreFactory @@ -1695,105 +1694,18 @@ public abstract class PluginDao extends CoreDao { return null; } - public void archiveData(String archivePath, Calendar insertStartTime, - Calendar insertEndTime) throws DataAccessLayerException, - SerializationException, IOException { - List pdos = getRecordsToArchive(insertStartTime, - insertEndTime); - if ((pdos != null) && (pdos.size() > 0)) { - // map of file to list of pdo - Map> pdoMap = new HashMap>(); - if (pdos.get(0) instanceof IPersistable) { - IHDFFilePathProvider pathProvider = this.pathProvider; - - for (PersistableDataObject pdo : pdos) { - IPersistable persistable = (IPersistable) pdo; - String path = pathProvider.getHDFPath(pluginName, - persistable) - + File.separator - + pathProvider.getHDFFileName(pluginName, - persistable); - List list = pdoMap.get(path); - if (list == null) { - list = new ArrayList(pdos.size()); - pdoMap.put(path, list); - } - list.add(pdo); - } - } else { - // order files by refTime hours - for (PersistableDataObject pdo : pdos) { - String timeString = null; - if (pdo instanceof PluginDataObject) { - PluginDataObject pluginDataObj = (PluginDataObject) pdo; - Date time = pluginDataObj.getDataTime() - .getRefTimeAsCalendar().getTime(); - timeString = DefaultPathProvider.fileNameFormat.get() - .format(time); - } else { - // no refTime to use bounded insert query bounds - Date time = insertStartTime.getTime(); - timeString = DefaultPathProvider.fileNameFormat.get() - .format(time); - } - - String path = pluginName + timeString; - List list = pdoMap.get(path); - if (list == null) { - list = new ArrayList(pdos.size()); - pdoMap.put(path, list); - } - list.add(pdo); - } - - } - - for (Map.Entry> entry : pdoMap - .entrySet()) { - String path = archivePath + File.separator + pluginName - + File.separator + entry.getKey(); - - // remove .h5 - int index = path.lastIndexOf('.'); - if ((index > 0) && ((path.length() - index) < 5)) { - // ensure its end of string in case extension is - // dropped/changed - path = path.substring(0, index); - } - - path += ".bin.gz"; - - File file = new File(path); - - if (file.exists()) { - // pull the - } - - // Thrift serialize pdo list - byte[] data = SerializationUtil.transformToThrift(entry - .getValue()); - - SerializationUtil.transformFromThrift(data); - - // save list to disk (in gz format?) 
- FileUtil.bytes2File(data, file, true); - } - } - - } - - - @SuppressWarnings("unchecked") - public List getRecordsToArchive( - Calendar insertStartTime, Calendar insertEndTime) + public int processArchiveRecords(Calendar insertStartTime, + Calendar insertEndTime, IDatabaseProcessor processor) throws DataAccessLayerException { DatabaseQuery dbQuery = new DatabaseQuery(this.getDaoClass()); dbQuery.addQueryParam("insertTime", insertStartTime, QueryOperand.GREATERTHANEQUALS); dbQuery.addQueryParam("insertTime", insertEndTime, QueryOperand.LESSTHAN); + dbQuery.addOrder("insertTime", true); dbQuery.addOrder("dataTime.refTime", true); - return (List) this.queryByCriteria(dbQuery); + return this.processByCriteria(dbQuery, processor); } protected static class DuplicateCheckStat { diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java new file mode 100644 index 0000000000..9fd67b00cc --- /dev/null +++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/processor/IDatabaseProcessor.java @@ -0,0 +1,69 @@ +/** + * This software was developed and / or modified by Raytheon Company, + * pursuant to Contract DG133W-05-CQ-1067 with the US Government. + * + * U.S. EXPORT CONTROLLED TECHNICAL DATA + * This software product contains export-restricted data whose + * export/transfer/disclosure is restricted by U.S. law. Dissemination + * to non-U.S. persons whether in the United States or abroad requires + * an export license or other authorization. + * + * Contractor Name: Raytheon Company + * Contractor Address: 6825 Pine Street, Suite 340 + * Mail Stop B8 + * Omaha, NE 68106 + * 402.291.0100 + * + * See the AWIPS II Master Rights File ("Master Rights File.pdf") for + * further licensing information. + **/ +package com.raytheon.uf.edex.database.processor; + +import java.util.List; + +/** + * Interface for working with a batched set of results inside a database + * session. The process() method may be called multiple times, depending on + * the batchSize of the processor. + * + * <pre>
+ * 
+ * SOFTWARE HISTORY
+ * 
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Dec 9, 2013  2555      rjpeter     Initial creation
+ * 
+ * </pre> + * + * @author rjpeter + * @version 1.0 + */ + +public interface IDatabaseProcessor { + /** + * Perform any processing on this batch of objects. + * + * @param objects the current batch of query results + * @return true if processing should continue, false otherwise + */ + public boolean process(List<?> objects); + + /** + * Perform any post processing if necessary. + */ + public void finish(); + + /** + * Get the batch size of the query. + * + * @return the batch size + */ + public int getBatchSize(); + + /** + * Set the batch size of the query. + * + * @param batchSize the number of rows to fetch per batch + */ + public void setBatchSize(int batchSize); +} diff --git a/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py b/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py index 0664a39c13..1c07e92756 100644 --- a/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py +++ b/edexOsgi/com.raytheon.uf.edex.dissemination/utility/edex_static/base/dissemination/handleOUP.py @@ -35,6 +35,7 @@ # 08/17/12 DR 15304 D. Friedman Use unique output file names # 10/12/12 DR 15418 D. Friedman Use unique attachment file names # 11/20/13 DR 16777 D. Friedman Add a test mode. +# 12/05/16 DR 16842 D. Friedman Do not set product ID on MhsMessage # # @@ -412,7 +413,6 @@ def sendWANMsg(productId, prodPathName, receivingSite, handling, if attachedFilename: mhsMsg.addEnclosure(attachedFilename) - mhsMsg.setProductId(productId) #mhsMsg.setBodyFile(prodPathName) mhsMsg.addEnclosure(prodPathName) if priority == 0: diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml index 7db114019d..e5e7eff311 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/DPADecoder-spring.xml @@ -18,11 +18,6 @@ - - - - diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/hpeDHRDecoder-spring.xml b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/hpeDHRDecoder-spring.xml index 40cdd388fc..c3e509e9dd 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/hpeDHRDecoder-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/hpeDHRDecoder-spring.xml @@ -18,11 +18,6 @@ - - - - diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/ohd-common.xml b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/ohd-common.xml index 9a725f5707..a87ebe243a 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/ohd-common.xml +++ b/edexOsgi/com.raytheon.uf.edex.ohd/res/spring/ohd-common.xml @@ -36,9 +36,18 @@ - - + + - - + + + + + + + + + \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-common.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-common.xml index 69e7038955..1962c3d86c 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-common.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.aww/res/spring/aww-common.xml @@ -20,4 +20,9 @@ + + + + \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-common.xml b/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-common.xml index f3432f11ea..ab1b4b5ece 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-common.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-common.xml @@ -18,6 +18,8 @@ - - + + + \ No newline at end of file diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml
b/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml index d9bbe3e33f..5969e8d4be 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.nctext/res/spring/nctext-ingest.xml @@ -16,11 +16,6 @@ - - - -
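
The batched-archive pieces in this patch are easiest to see working end to end: PluginDao.processArchiveRecords builds the bounded, insert-time-ordered DatabaseQuery and hands it to CoreDao.processByCriteria, which pages through the results and feeds each batch to an IDatabaseProcessor. The sketch below is illustrative only, not code from this change: the CountingProcessor class, its fields, and its log message are invented for the example, and it assumes the List<?> parameter shown in the interface above; only the IDatabaseProcessor methods and the processByCriteria/processArchiveRecords signatures come from the patch itself.

    import java.util.List;

    import com.raytheon.uf.edex.database.processor.IDatabaseProcessor;

    /**
     * Hypothetical processor that simply counts rows one batch at a time.
     * Invented for illustration; not part of this change.
     */
    public class CountingProcessor implements IDatabaseProcessor {

        private int batchSize = 500; // rows fetched per trip to the database

        private int total = 0;

        @Override
        public boolean process(List<?> objects) {
            // Called once per batch; processByCriteria keeps paging while
            // this returns true and the previous batch was non-empty.
            total += objects.size();
            return true;
        }

        @Override
        public void finish() {
            // Called exactly once, after the final batch.
            System.out.println("rows processed: " + total);
        }

        @Override
        public int getBatchSize() {
            return batchSize;
        }

        @Override
        public void setBatchSize(int batchSize) {
            this.batchSize = batchSize;
        }
    }

A call site would then look something like the following, where dao is some concrete PluginDao and the two Calendar bounds are assumed to exist:

    IDatabaseProcessor processor = new CountingProcessor();
    processor.setBatchSize(1000);
    int count = dao.processArchiveRecords(insertStartTime, insertEndTime, processor);

The design point of the refactor is visible in processByCriteria's do/while loop: it advances setFirstResult by the number of rows already seen and clears the Hibernate session after each batch, so memory stays bounded by the batch size rather than growing with the archive window, which is what the removed getRecordsToArchive/archiveData path required.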