Merge branch 'omaha_15.1.1' into field_15.1.1

Change-Id: I46ed5379753021871381803e9e84e1c6a37ded10

Former-commit-id: a35b2c8cbd4e07bf92fbc32d8548f9fa0c0a9dd5
This commit is contained in:
Ana Rivera 2015-03-04 18:30:37 +00:00
commit 7065fe20db
37 changed files with 970 additions and 675 deletions

View file

@ -1,72 +1,9 @@
<configuration debug="false" scan="true">
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<param name="Threshold" value="INFO" />
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/logback-edex-appenders.xml" />
<!-- hibernate log -->
<appender name="HibernateLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-hibernate-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<logger name="com.raytheon">
<level value="INFO"/>
</logger>
<!-- used by c3p0 -->
<logger name="com.mchange">
<level value="ERROR"/>
</logger>
<logger name="com.tc">
<level value="WARN"/>
</logger>
<logger name="mx4j">
<level value="ERROR"/>
</logger>
<logger name="org.apache">
<level value="INFO"/>
</logger>
<logger name="org.apache.activemq.spring">
<level value="WARN"/>
</logger>
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
<level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
<level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
<level value="INFO"/>
</logger>
<logger name="org.apache.qpid.client.BasicMessageProducer_0_10">
<level value="WARN"/>
</logger>
<logger name="org.apache.xbean.spring">
<level value="WARN"/>
</logger>
<logger name="org.quartz">
<level value="ERROR"/>
</logger>
<logger name="org.springframework">
<level value="ERROR"/>
</logger>
<logger name="org.hibernate" additivity="false">
<level value="ERROR"/>
<appender-ref ref="HibernateLog" />
</logger>
<logger name="org.geotools">
<level value="WARN"/>
</logger>
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-hibernate-logger.xml" />
<!-- <logger name="com.raytheon.uf.edex.datadelivery.provideragent" additivity="false">
<level value="INFO"/>

View file

@ -0,0 +1,23 @@
<included>
<!-- Appenders shared by all EDEX logback files. -->
<!-- This fragment is pulled into the top-level configs via
     <include file="${edex.home}/conf/logback-edex-appenders.xml"/>. -->
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- Async wrapper so logging callers are decoupled from console I/O. -->
<appender name="asyncConsole" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="console" />
</appender>
<!-- Performance log -->
<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<!-- NOTE(review): <name> presumably becomes the log-file base name
     (replacing the old explicit fileNamePattern) - confirm in
     StdTimeBasedRollingPolicy. -->
<name>performance</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="PerformanceLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="PerformanceLog" />
</appender>
</included>

View file

@ -0,0 +1,17 @@
<included>
<!-- Hibernate appender + logger shared by the EDEX logback configs;
     included via logback-edex-hibernate-logger.xml. -->
<!-- hibernate log -->
<appender name="HibernateLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>hibernate</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder">
<!-- NOTE(review): <trace>false</trace> appears to replace the old
     "%nopex" pattern suffix (i.e. suppress exception traces in this
     log) - confirm in UFStdEncoder. -->
<trace>false</trace>
</encoder>
</appender>
<!-- additivity="false": hibernate output goes only to HibernateLog,
     not to the root/console appenders. -->
<logger name="org.hibernate" additivity="false">
<level value="ERROR"/>
<appender-ref ref="HibernateLog" />
</logger>
</included>

View file

@ -0,0 +1,7 @@
<included>
<!-- Loggers shared only by logback-request.xml and logback-ingest.xml -->
<!-- Quiet NotifySrv to WARN so routine notifications do not flood the log. -->
<logger name="com.raytheon.edex.services.NotifySrv">
<level value="WARN"/>
</logger>
</included>

View file

@ -0,0 +1,46 @@
<included>
<!-- Loggers shared by all EDEX logback files. -->
<logger name="com.raytheon">
<level value="INFO"/>
</logger>
<!-- Performance messages go only to the async performance appender
     (PerformanceLogAsync is defined in logback-edex-appenders.xml). -->
<logger name="PerformanceLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="PerformanceLogAsync" />
</logger>
<!-- used by c3p0 -->
<logger name="com.mchange">
<level value="ERROR"/>
</logger>
<logger name="mx4j">
<level value="ERROR"/>
</logger>
<!-- Third-party libraries below are raised to WARN/ERROR to cut noise. -->
<logger name="org.apache">
<level value="INFO"/>
</logger>
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
<level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
<level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
<level value="INFO"/>
</logger>
<logger name="org.apache.qpid.client.BasicMessageProducer_0_10">
<level value="WARN"/>
</logger>
<logger name="org.apache.xbean.spring">
<level value="WARN"/>
</logger>
<logger name="org.quartz">
<level value="ERROR"/>
</logger>
<logger name="org.springframework">
<level value="ERROR"/>
</logger>
<logger name="org.geotools">
<level value="WARN"/>
</logger>
</included>

View file

@ -0,0 +1,8 @@
<included>
<!-- Shared context properties consumed by the EDEX logback configs
     and the UF standard appenders/encoders. -->
<!-- Directory under which log files are written. -->
<property scope="context" name="log.dir.home" value="${edex.home}"/>
<!-- Base file name and run-mode suffix used when building log file names.
     (Fixed inconsistent attribute spacing: value ="edex" -> value="edex".) -->
<property scope="context" name="log.file.base" value="edex"/>
<property scope="context" name="log.file.mode" value="${edex.run.mode}"/>
<!-- Only define when not wanting to use the UF Standard.
<property scope="context" name="log.message.pattern" value="%-5p %d [%t] %c{0}: %m%n"/>
-->
</included>

View file

@ -1,231 +1,103 @@
<configuration debug="false" scan="true">
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="asyncConsole" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="console" />
</appender>
<!-- hibernate log -->
<appender name="HibernateLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-hibernate-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n %nopex</pattern>
</encoder>
</appender>
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/logback-edex-appenders.xml" />
<!-- shef log -->
<appender name="shef" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-shef-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>shef</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- activeTableChange log -->
<appender name="activeTableChangeLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-activeTableChange-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>activeTableChange</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="activeTableChangeLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="activeTableChangeLog" />
</appender>
<logger name="ActiveTableChange" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="activeTableChangeLogAsync" />
</logger>
<!-- Purge log -->
<appender name="purge" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-purge-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>purge</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- RouteFailedLog log -->
<appender name="RouteFailedLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-unrecognized-files-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>unrecognized-files</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- ShefPerformanceLog log -->
<appender name="ShefPerfLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-shef-performance-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>shef-performance</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- Gen Areal FFG log -->
<appender name="GenArealFFG" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-gen_areal_ffg-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>gen_areal_ffg</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- Gen Areal QPE log -->
<appender name="GenArealQPE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-gen_areal_qpe-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>gen_areal_qpe</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- Trigger log -->
<appender name="FailedTriggerLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-trigger-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>trigger</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- ohd log -->
<appender name="OhdLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-ohd-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>ohd</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="ThreadBasedLog" class="com.raytheon.uf.common.status.logback.ThreadBasedAppender">
<maxHistory>30</maxHistory>
<patternLayout>%-5p %d [%t] %c{0}: %m%n</patternLayout>
<fileNameBase>${edex.home}/logs/edex-${edex.run.mode}-%s{name}-%d{yyyyMMdd}.log</fileNameBase>
<defaultAppenderName>asyncConsole</defaultAppenderName>
<appender-ref ref="asyncConsole"/>
<appender-ref ref="shef"/>
<appender-ref ref="purge"/>
</appender>
<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-performance-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-limited-loggers.xml"/>
<include file="${edex.home}/conf/logback-edex-hibernate-logger.xml"/>
<appender name="PerformanceLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="PerformanceLog" />
</appender>
<logger name="PerformanceLogger" additivity="false">
<logger name="ActiveTableChange" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="PerformanceLogAsync" />
<appender-ref ref="activeTableChangeLogAsync" />
</logger>
<logger name="com.raytheon">
<level value="INFO"/>
</logger>
<logger name="com.raytheon.edex.plugin.shef" additivity="false">
<level value="INFO"/>
<appender-ref ref="shef" />
</logger>
<logger name="com.raytheon.edex.services.ArchiveSrv">
<level value="WARN"/>
</logger>
<logger name="com.raytheon.edex.services.NotifySrv">
<level value="WARN"/>
</logger>
<!-- used by c3p0 -->
<logger name="com.mchange">
<level value="ERROR"/>
</logger>
<logger name="com.tc">
<level value="WARN"/>
</logger>
<logger name="mx4j">
<level value="ERROR"/>
</logger>
<logger name="org.apache">
<level value="INFO"/>
</logger>
<logger name="org.apache.activemq.spring">
<level value="WARN"/>
</logger>
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
<level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
<level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
<level value="INFO"/>
</logger>
<logger name="org.apache.qpid.client.BasicMessageProducer_0_10">
<level value="WARN"/>
</logger>
<logger name="org.apache.xbean.spring">
<level value="WARN"/>
</logger>
<logger name="org.quartz">
<level value="ERROR"/>
</logger>
<logger name="org.springframework">
<level value="ERROR"/>
</logger>
<logger name="org.hibernate" additivity="false">
<level value="ERROR"/>
<appender-ref ref="HibernateLog" />
</logger>
<logger name="org.geotools">
<level value="WARN"/>
</logger>
<logger name="com.raytheon.uf.edex.purgesrv" additivity="false">
<level value="INFO"/>

View file

@ -1,138 +1,55 @@
<configuration debug="false" scan="true">
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<!-- hibernate log -->
<appender name="HibernateLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-hibernate-%d{yyyyMMdd}.log</fileNamePattern>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/logback-edex-appenders.xml" />
<!-- BandwidthManager log -->
<appender name="BandwidthManagerLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-bandwidth-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>bandwidth</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- data delivery log -->
<appender name="DataDeliveryLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-datadelivery-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>datadelivery</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- data delivery Notification log -->
<appender name="NotificationLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-notification-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>notification</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- data delivery Retrieval log -->
<appender name="RetrievalLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-retrieval-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>retrieval</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<!-- Purge log -->
<appender name="PurgeLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-purge-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>purge</name>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="ThreadBasedLog" class="com.raytheon.uf.common.status.logback.ThreadBasedAppender">
<maxHistory>30</maxHistory>
<patternLayout>%-5p %d [%t] %c{0}: %m%n</patternLayout>
<fileNameBase>${edex.home}/logs/edex-${edex.run.mode}-%s{name}-%d{yyyyMMdd}.log</fileNameBase>
<defaultAppenderName>console</defaultAppenderName>
<appender-ref ref="console"/>
</appender>
<logger name="com.raytheon">
<level value="INFO"/>
</logger>
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-hibernate-logger.xml" />
<!-- used by c3p0 -->
<logger name="com.mchange">
<level value="ERROR"/>
</logger>
<logger name="com.tc">
<level value="WARN"/>
</logger>
<logger name="mx4j">
<level value="ERROR"/>
</logger>
<logger name="org.apache">
<level value="INFO"/>
</logger>
<logger name="org.apache.activemq.spring">
<level value="WARN"/>
</logger>
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
<level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
<level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
<level value="INFO"/>
</logger>
<logger name="org.apache.qpid.client.BasicMessageProducer_0_10">
<level value="WARN"/>
</logger>
<logger name="org.apache.xbean.spring">
<level value="WARN"/>
</logger>
<logger name="org.quartz">
<level value="ERROR"/>
</logger>
<logger name="org.springframework">
<level value="ERROR"/>
</logger>
<logger name="org.hibernate" additivity="false">
<level value="ERROR"/>
<appender-ref ref="HibernateLog" />
</logger>
<logger name="org.geotools">
<level value="WARN"/>
</logger>
<logger name="edu">
<level value="WARN" />
</logger>

View file

@ -0,0 +1,68 @@
<configuration debug="false" scan="true">
<!-- EDEX request-mode logback configuration. Shared properties,
     appenders (console/asyncConsole, PerformanceLog) and loggers come
     from the included conf fragments. -->
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/logback-edex-appenders.xml" />
<!-- ProductSrvRequest log -->
<appender name="ProductSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>productSrvRequest</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="ProductSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="ProductSrvRequestLog" />
</appender>
<!-- TextDBSrvRequest log -->
<appender name="TextDBSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>textdbSrvRequest</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="TextDBSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="TextDBSrvRequestLog" />
</appender>
<!-- ThriftSrv (RemoteRequestRouteWrapper) request log -->
<appender name="ThriftSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="com.raytheon.uf.common.status.logback.StdTimeBasedRollingPolicy">
<name>thriftSrv</name>
</rollingPolicy>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="ThriftSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="ThriftSrvRequestLog" />
</appender>
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-limited-loggers.xml"/>
<logger name="ProductSrvRequestLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="ProductSrvRequestLogAsync"/>
</logger>
<logger name="TextDBSrvRequestLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="TextDBSrvRequestLogAsync"/>
</logger>
<logger name="ThriftSrvRequestLogger" additivity="false">
<!-- Fixed: was value="Info". Logback parses levels case-insensitively,
     so behavior is unchanged, but the canonical upper-case form matches
     every other <level> element in these configs. -->
<level value="INFO"/>
<appender-ref ref="ThriftSrvRequestLogAsync" />
</logger>
<logger name="org.hibernate">
<level value="ERROR"/>
</logger>
<!-- default logging -->
<root>
<level value="INFO"/>
<!-- asyncConsole is defined in logback-edex-appenders.xml (included above). -->
<appender-ref ref="asyncConsole"/>
</root>
</configuration>

View file

@ -1,163 +1,9 @@
<configuration debug="false" scan="true">
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="asyncConsole" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="console" />
</appender>
<!-- hibernate log -->
<appender name="HibernateLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-hibernate-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n %nopex</pattern>
</encoder>
</appender>
<!-- ProductSrvRequest log -->
<appender name="ProductSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-request-productSrvRequest-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="ProductSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="ProductSrvRequestLog" />
</appender>
<!-- TextDBSrvRequest log -->
<appender name="TextDBSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-request-textdbSrvRequest-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="TextDBSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="TextDBSrvRequestLog" />
</appender>
<!-- ThriftSrv (RemoteRequestRouteWrapper) request log -->
<appender name="ThriftSrvRequestLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-request-thriftSrv-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="ThriftSrvRequestLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="ThriftSrvRequestLog" />
</appender>
<!-- Performance log -->
<appender name="PerformanceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${edex.home}/logs/edex-${edex.run.mode}-performance-%d{yyyyMMdd}.log</fileNamePattern>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
</appender>
<appender name="PerformanceLogAsync" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="PerformanceLog" />
</appender>
<logger name="ProductSrvRequestLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="ProductSrvRequestLogAsync"/>
</logger>
<logger name="TextDBSrvRequestLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="TextDBSrvRequestLogAsync"/>
</logger>
<logger name="ThriftSrvRequestLogger" additivity="false">
<level value="Info"/>
<appender-ref ref="ThriftSrvRequestLogAsync" />
</logger>
<logger name="PerformanceLogger" additivity="false">
<level value="DEBUG"/>
<appender-ref ref="PerformanceLogAsync" />
</logger>
<logger name="com.raytheon">
<level value="INFO"/>
</logger>
<logger name="com.raytheon.edex.services.ArchiveSrv">
<level value="WARN"/>
</logger>
<logger name="com.raytheon.edex.services.NotifySrv">
<level value="WARN"/>
</logger>
<!-- used by c3p0 -->
<logger name="com.mchange">
<level value="ERROR"/>
</logger>
<logger name="com.tc">
<level value="WARN"/>
</logger>
<logger name="mx4j">
<level value="ERROR"/>
</logger>
<logger name="org.apache">
<level value="INFO"/>
</logger>
<logger name="org.apache.activemq.spring">
<level value="WARN"/>
</logger>
<logger name="org.apache.camel.impl.converter.DefaultTypeConverter">
<level value="ERROR"/>
</logger>
<logger name="org.apache.camel.core.xml">
<level value="WARN"/>
</logger>
<logger name="org.apache.qpid">
<level value="INFO"/>
</logger>
<logger name="org.apache.qpid.client.BasicMessageProducer_0_10">
<level value="WARN"/>
</logger>
<logger name="org.apache.xbean.spring">
<level value="WARN"/>
</logger>
<logger name="org.quartz">
<level value="ERROR"/>
</logger>
<logger name="org.springframework">
<level value="ERROR"/>
</logger>
<logger name="org.hibernate" additivity="false">
<level value="ERROR"/>
<appender-ref ref="HibernateLog" />
</logger>
<logger name="org.geotools">
<level value="WARN"/>
</logger>
<include file="${edex.home}/conf/logback-edex-properties.xml"/>
<include file="${edex.home}/conf/logback-edex-appenders.xml" />
<!-- Simple default logback. -->
<include file="${edex.home}/conf/logback-edex-loggers.xml" />
<include file="${edex.home}/conf/logback-edex-hibernate-logger.xml" />
<!-- default logging -->
<root>

View file

@ -28,4 +28,5 @@ export SERIALIZE_STREAM_MAX_SIZE_MB=8
export EDEX_DEBUG_PORT=5005
export EDEX_JMX_PORT=1616
export LOG_CONF=logback-request.xml
export MGMT_PORT=9601

View file

@ -34,6 +34,7 @@ import org.eclipse.swt.widgets.Display;
* ------------ ---------- ----------- --------------------------
* 05 Oct 2008 lvenable Initial creation.
* 02 Apr 2009 lvenable TTR fixes.
* 02 Mar 2015 3856 lvenable Cancel the job if the timer is being canceled.
*
* </pre>
*
@ -45,12 +46,12 @@ public class AlertTimer {
/**
* Callback called when the timer fires or when the timer is finished.
*/
private ITimerAction actionCB;
private final ITimerAction actionCB;
/**
* Parent display.
*/
private Display parentDisplay;
private final Display parentDisplay;
/**
* Time in milliseconds between executions.
@ -74,7 +75,7 @@ public class AlertTimer {
*/
private boolean blinkText;
private Job job = new Job("AlertTimer") {
private final Job job = new Job("AlertTimer") {
@Override
protected IStatus run(IProgressMonitor monitor) {
@ -87,6 +88,7 @@ public class AlertTimer {
if (isRunning) {
parentDisplay.syncExec(new Runnable() {
@Override
public void run() {
actionCB.timerAction(blinkText);
}
@ -224,13 +226,14 @@ public class AlertTimer {
* Setting alertPopupDlg Cancel the timer.
*/
public void cancelTimer() {
// only synchronize on cancelling the timer, don't do the syncExec in
// only synchronize on canceling the timer, don't do the syncExec in
// the sync block.
boolean cancel = false;
synchronized (this) {
if (isRunning) {
isRunning = false;
cancel = true;
job.cancel();
}
}
@ -240,6 +243,7 @@ public class AlertTimer {
}
parentDisplay.syncExec(new Runnable() {
@Override
public void run() {
actionCB.timerCompleted();
}

View file

@ -1,9 +1,10 @@
<configuration debug="false" scan="false">
<!-- Only define when not wanting to use the UF Standard.
<property scope="context" name="log.message.pattern" value="%-5p %d [%t] %c{0}: %m%n"/>
-->
<!-- general application log -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
<filter class="com.raytheon.uf.common.status.logback.InvertedThresholdFilter">
<level>INFO</level>
</filter>
@ -14,9 +15,7 @@
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>WARN</level>
</filter>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<appender name="AlertVizAdminLogAppender" class="com.raytheon.uf.common.status.logback.EnvConfigurableRollingFileAppender">
@ -33,9 +32,7 @@
<maxFileSize>2GB</maxFileSize>
</triggeringPolicy>
<append>true</append>
<encoder>
<pattern>%-5p %d [%t] %c{0}: %m%n</pattern>
</encoder>
<encoder class="com.raytheon.uf.common.status.logback.UFStdEncoder"/>
</appender>
<logger name="AlertVizAdminLogger" additivity="false">
@ -47,18 +44,12 @@
<level value="INFO"/>
</logger>
<logger name="com.tc">
<level value="WARN"/>
</logger>
<logger name="mx4j">
<level value="ERROR"/>
</logger>
<logger name="org.apache">
<level value="INFO"/>
</logger>
<logger name="org.apache.activemq.spring">
<level value="WARN"/>
</logger>
<logger name="org.apache.commons.beanutils">
<level value="WARN"/>
</logger>

Binary file not shown.

After

Width:  |  Height:  |  Size: 545 B

View file

@ -1987,4 +1987,16 @@
perspectiveId="com.raytheon.uf.viz.d2d.ui.perspectives.D2D5Pane">
</tearoffperspective>
</extension>
<!-- add right click on tab actions -->
<extension
point="com.raytheon.viz.ui.editorMenuAddition">
<editorMenuAddition
class="com.raytheon.viz.ui.actions.RenameTabAction"
name="Rename Tab"
icon="icons/rename.png"
perspectiveId="com.raytheon.uf.viz.d2d.ui.perspectives.D2D5Pane">
</editorMenuAddition>
</extension>
</plugin>

View file

@ -31,6 +31,7 @@ import java.util.Set;
import org.eclipse.jface.dialogs.InputDialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
@ -68,6 +69,7 @@ import com.raytheon.uf.viz.core.drawables.AbstractDescriptor;
import com.raytheon.uf.viz.core.drawables.AbstractRenderableDisplay;
import com.raytheon.uf.viz.core.drawables.ResourcePair;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.maps.display.MapRenderableDisplay;
import com.raytheon.uf.viz.core.procedures.AlterBundleFactory;
import com.raytheon.uf.viz.core.procedures.Bundle;
import com.raytheon.uf.viz.core.procedures.IAlterBundleContributor;
@ -78,6 +80,7 @@ import com.raytheon.uf.viz.core.rsc.AbstractVizResource;
import com.raytheon.uf.viz.core.rsc.ResourceList;
import com.raytheon.uf.viz.d2d.ui.dialogs.procedures.ProcedureComm.BundlePair;
import com.raytheon.viz.ui.HistoryList;
import com.raytheon.viz.ui.IRenameablePart;
import com.raytheon.viz.ui.UiUtil;
import com.raytheon.viz.ui.actions.SaveBundle;
import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
@ -104,6 +107,8 @@ import com.raytheon.viz.ui.editor.AbstractEditor;
* Jun 7, 2013 2074 mnash Remove resource if doesn't instantiate correctly
* Aug 11, 2014 3480 bclement added info logging when procedure is loaded
* Jan 06, 2015 3879 nabowle Disallow copy-in when the view is empty.
* Mar 02, 2015 4204 njensen Copy In uses tab name if applicable
*
* </pre>
*
* @author unknown
@ -569,7 +574,7 @@ public class ProcedureDlg extends CaveSWTDialog {
InputDialog id = new InputDialog(shell,
"Enter Bundle Name", "Enter bundle name:", b.name,
null);
if (InputDialog.OK == id.open()) {
if (Window.OK == id.open()) {
String newName = id.getValue();
if (newName != null
@ -742,7 +747,16 @@ public class ProcedureDlg extends CaveSWTDialog {
}
BundlePair bp = new BundlePair();
if (!IRenameablePart.DEFAULT_PART_NAME.equals(b.getName())
&& b.getDisplays()[0] instanceof MapRenderableDisplay) {
/*
* This is a horrible hack to get a renamed editor's
* name instead of the default of Map.
*/
bp.name = b.getName();
} else {
bp.name = HistoryList.getInstance().getLabels()[0];
}
bp.xml = sb;
bundles.add(bp);
resyncProcedureAndList();
@ -1085,7 +1099,7 @@ public class ProcedureDlg extends CaveSWTDialog {
* otherwise null.
*
* @param fileName
* @return
* @return the dialog if it's open for for the filename
*/
public static ProcedureDlg getDialog(String fileName) {
synchronized (ProcedureDlg.openDialogs) {

View file

@ -60,6 +60,7 @@ import com.raytheon.uf.viz.d2d.core.legend.D2DLegendResource.LegendMode;
import com.raytheon.viz.ui.BundleLoader;
import com.raytheon.viz.ui.EditorUtil;
import com.raytheon.viz.ui.HistoryList;
import com.raytheon.viz.ui.IRenameablePart;
import com.raytheon.viz.ui.UiUtil;
import com.raytheon.viz.ui.color.BackgroundColor;
import com.raytheon.viz.ui.color.IBackgroundColorChangedListener;
@ -93,6 +94,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Oct 10, 2013 #2104 mschenke Switched to use MapScalesManager
* Jul 15, 2014 2954 njensen Updated init() for MapScalesManager change
* Aug 25, 2014 3467 mapeters Removed changing of editability from swapPanes().
* Mar 02, 2015 4204 njensen Support for swapping part names
*
* </pre>
*
@ -100,7 +102,7 @@ import com.vividsolutions.jts.geom.Coordinate;
*
*/
public class SideView extends ViewPart implements IMultiPaneEditor,
IBackgroundColorChangedListener {
IBackgroundColorChangedListener, IRenameablePart {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(SideView.class);
@ -319,13 +321,21 @@ public class SideView extends ViewPart implements IMultiPaneEditor,
AbstractEditor theEditor = (AbstractEditor) editor;
// swap part name
String editorName = theEditor.getPartName();
String viewName = this.getPartName();
if ("D2D Side View".equals(viewName)) {
viewName = IRenameablePart.DEFAULT_PART_NAME;
}
theEditor.setPartName(viewName);
this.setPartName(editorName);
// First thing to do, swap input handlers
// Get editor resource handlers and unregister on editor
final InputPriority[] SWAPPABLE_PRIORITIES = { InputPriority.RESOURCE,
InputPriority.SYSTEM_RESOURCE,
final InputPriority[] SWAPPABLE_PRIORITIES = {
InputPriority.RESOURCE, InputPriority.SYSTEM_RESOURCE,
InputPriority.SYSTEM_RESOURCE_LOW };
HashMap<InputPriority, IInputHandler[]> editorHandlers =
new HashMap<IInputHandler.InputPriority, IInputHandler[]>();
HashMap<InputPriority, IInputHandler[]> editorHandlers = new HashMap<IInputHandler.InputPriority, IInputHandler[]>();
for (InputPriority priority : SWAPPABLE_PRIORITIES) {
IInputHandler[] handlers = theEditor.getMouseManager()
.getHandlersForPriority(priority);
@ -336,8 +346,7 @@ public class SideView extends ViewPart implements IMultiPaneEditor,
}
// Store and unregister input handlers on ourself
HashMap<InputPriority, IInputHandler[]> myHandlers =
new HashMap<IInputHandler.InputPriority, IInputHandler[]>();
HashMap<InputPriority, IInputHandler[]> myHandlers = new HashMap<IInputHandler.InputPriority, IInputHandler[]>();
for (InputPriority priority : SWAPPABLE_PRIORITIES) {
IInputHandler[] handlers = paneManager.getMouseManager()
.getHandlersForPriority(priority);
@ -422,6 +431,8 @@ public class SideView extends ViewPart implements IMultiPaneEditor,
.getId()
.equals(DescriptorMap.getEditorId(myRenderables[0]
.getDescriptor().getClass().getName()))) {
// swap loop properties
LoopProperties editorLoopProperties = theEditor
.getLoopProperties();
theEditor.setLoopProperties(loopProperties);
@ -490,6 +501,8 @@ public class SideView extends ViewPart implements IMultiPaneEditor,
LoopProperties editorLoopProperties = theEditor
.getLoopProperties();
theEditor = UiUtil.createEditor(editorId, myRenderables);
// closed the editor above so have to set the name again
theEditor.setPartName(viewName);
theEditor.setLoopProperties(loopProperties);
this.loopProperties = editorLoopProperties;
@ -550,8 +563,7 @@ public class SideView extends ViewPart implements IMultiPaneEditor,
for (InputPriority priority : SWAPPABLE_PRIORITIES) {
IInputHandler[] handlers = myHandlers.get(priority);
for (IInputHandler handler : handlers) {
theEditor.registerMouseHandler(handler,
priority);
theEditor.registerMouseHandler(handler, priority);
}
}
@ -781,4 +793,10 @@ public class SideView extends ViewPart implements IMultiPaneEditor,
}
refresh();
}
@Override
public void setPartName(String partName) {
super.setPartName(partName);
}
}

View file

@ -21,6 +21,7 @@ package com.raytheon.uf.viz.damagepath;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
@ -36,7 +37,6 @@ import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.LocalizationFileInputStream;
import com.raytheon.uf.common.localization.LocalizationFileOutputStream;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.viz.core.IGraphicsTarget;
@ -75,6 +75,14 @@ public class DamagePathLayer<T extends DamagePathResourceData> extends
private static final String PATH = DIR + IPathManager.SEPARATOR + FILE;
/**
* JVM property to specify the localization level to attempt to save/load
* with. Falls back to USER if not defined.
*/
private static final LocalizationLevel LEVEL_TO_USE = LocalizationLevel
.valueOf(System.getProperty("damage.path.localization.level",
LocalizationLevel.USER.name()));
/*
* TODO: If we support multiple polygons in the future then the jobs will
* need to be smart enough to load/save different files.
@ -104,7 +112,7 @@ public class DamagePathLayer<T extends DamagePathResourceData> extends
// listen for changes to the directory
LocalizationFile dir = PathManagerFactory.getPathManager()
.getLocalizationFile(getUserContext(), DIR);
.getLocalizationFile(getContext(), DIR);
dir.addFileUpdatedObserver(this);
loadJob.setSystem(true);
@ -129,7 +137,7 @@ public class DamagePathLayer<T extends DamagePathResourceData> extends
@Override
protected void disposeInternal() {
LocalizationFile dir = PathManagerFactory.getPathManager()
.getLocalizationFile(getUserContext(), DIR);
.getLocalizationFile(getContext(), DIR);
dir.removeFileUpdatedObserver(this);
super.disposeInternal();
@ -183,13 +191,13 @@ public class DamagePathLayer<T extends DamagePathResourceData> extends
}
}
private LocalizationContext getUserContext() {
private LocalizationContext getContext() {
return PathManagerFactory.getPathManager().getContext(
LocalizationType.COMMON_STATIC, LocalizationLevel.USER);
LocalizationType.COMMON_STATIC, LEVEL_TO_USE);
}
protected LocalizationFile getDamagePathFile() {
LocalizationContext ctx = getUserContext();
LocalizationContext ctx = getContext();
return PathManagerFactory.getPathManager().getLocalizationFile(ctx,
PATH);
}
@ -205,9 +213,9 @@ public class DamagePathLayer<T extends DamagePathResourceData> extends
}
protected void loadDamagePath(LocalizationFile file) {
try (LocalizationFileInputStream fis = file.openInputStream()) {
try (InputStream is = file.openInputStream()) {
GeoJsonUtil json = new GeoJsonUtilSimpleImpl();
Polygon geometry = (Polygon) json.deserializeGeom(fis);
Polygon geometry = (Polygon) json.deserializeGeom(is);
/*
* specifically call super.resetPolygon() cause this.resetPolygon()
* will save the file and we don't want to do that or we could

View file

@ -43,7 +43,8 @@ import com.raytheon.viz.ui.panes.PaneManager;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 8, 2009 mschenke Initial creation
* Oct 08, 2009 mschenke Initial creation
* Mar 02, 2015 4204 njensen Overrode setPartName()
*
* </pre>
*
@ -151,4 +152,9 @@ public class VizXyEditor extends VizMultiPaneEditor implements
setColor(getPaneManager().getInsetPanes(), newColor);
}
@Override
public void setPartName(String partName) {
super.setPartName(partName);
}
}

View file

@ -25,6 +25,7 @@ import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@ -50,7 +51,6 @@ import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.LocalizationFileInputStream;
import com.raytheon.uf.common.localization.LocalizationFileOutputStream;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
@ -86,7 +86,8 @@ import com.vividsolutions.jts.geom.LineString;
* 07-11-12 #875 rferrel Move points to PointsDataManager.
* 01-29-14 DR 16351 D. Friedman Fix updates to storm track from preferences.
* 04-02-14 DR 16351 D. Friedman Fix updates to storm track from preferences. (backport from 14.2.2)
* 06-03-24 3191 njensen Improved saving/loading storm track data
* Jun 03, 2014 3191 njensen Improved saving/loading storm track data
* Feb 24, 2015 3978 njensen Changed to use abstract InputStream
*
* </pre>
*
@ -122,25 +123,25 @@ public class ToolsDataManager implements ILocalizationFileObserver {
private Map<String, LineString> baselines;
private ListenerList baselineListeners = new ListenerList();
private final ListenerList baselineListeners = new ListenerList();
private PointsDataManager pointsManager;
private final PointsDataManager pointsManager;
private Collection<RangeRing> rangeRings;
private StormTrackData stormData;
private ListenerList stormListeners = new ListenerList();
private final ListenerList stormListeners = new ListenerList();
private Object stormLock = new Object();
private final Object stormLock = new Object();
private boolean stormTrackDirty = false;
private LocalizationFile userToolsDir;
private final LocalizationFile userToolsDir;
private IPathManager pathMgr;
private final IPathManager pathMgr;
private BlockingQueue<String> baselineStoreQueue = new LinkedBlockingQueue<String>();
private final BlockingQueue<String> baselineStoreQueue = new LinkedBlockingQueue<String>();
public static synchronized ToolsDataManager getInstance() {
if (theManager == null) {
@ -252,7 +253,7 @@ public class ToolsDataManager implements ILocalizationFileObserver {
userToolsDir.getContext(), userToolsDir.getName()
+ IPathManager.SEPARATOR + STORM_TRACK_FILE);
if (f.exists()) {
LocalizationFileInputStream is = null;
InputStream is = null;
try {
is = f.openInputStream();
stormData = JAXB.unmarshal(is, StormTrackData.class);
@ -380,7 +381,7 @@ public class ToolsDataManager implements ILocalizationFileObserver {
}
}
private Job baselineStoreJob = new Job("Storing Baselines") {
private final Job baselineStoreJob = new Job("Storing Baselines") {
@Override
protected IStatus run(IProgressMonitor monitor) {

View file

@ -21,9 +21,9 @@ package com.raytheon.uf.common.archive.config;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.FieldPosition;
import java.text.MessageFormat;
import java.util.ArrayList;
@ -59,7 +59,6 @@ import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.LocalizationFileInputStream;
import com.raytheon.uf.common.localization.LocalizationFileOutputStream;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
@ -98,6 +97,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Apr 01, 2014 2862 rferrel Moved purge only routines to ArchivePurgeManager.
* Apr 29, 2014 3036 rferrel Check for missing archive root directories.
* May 22, 2014 3181 rferrel Add check for valid array index.
* Feb 24, 2015 3978 njensen Changed to use abstract InputStream
*
* </pre>
*
* @author rferrel
@ -910,7 +911,7 @@ public class ArchiveConfigManager {
LocalizationFile lFile) throws IOException, LocalizationException,
DataBindingException {
ArchiveConfig archiveConfig = null;
LocalizationFileInputStream stream = null;
InputStream stream = null;
try {
stream = lFile.openInputStream();
archiveConfig = JAXB.unmarshal(stream, ArchiveConfig.class);
@ -954,7 +955,7 @@ public class ArchiveConfigManager {
LocalizationFile lFile = pathMgr.getLocalizationFile(siteContext,
ARCHIVE_DIR + "/" + fileName);
if (lFile.exists()) {
FileInputStream stream = null;
InputStream stream = null;
try {
stream = lFile.openInputStream();
selections = unmarshallSelectionStream(stream);
@ -1069,7 +1070,7 @@ public class ArchiveConfigManager {
* @return selectConfig
* @throws IOException
*/
private SelectConfig unmarshallSelectionStream(FileInputStream stream)
private SelectConfig unmarshallSelectionStream(InputStream stream)
throws IOException {
SelectConfig selections = null;
try {

View file

@ -31,6 +31,7 @@ package com.raytheon.uf.common.dataplugin.binlightning;
* May 30, 2014 3226 bclement Initial creation
* Jul 07, 2014 3333 bclement added SOURCE
* Jan 22, 2014 2949 nabowle Add default and unknown source values.
* Feb 27, 2015 4181 mapeters Added AVAILABLE_PARAMETERS.
*
* </pre>
*
@ -69,6 +70,13 @@ public class LightningConstants {
public static final String SENSOR_COUNT_DATASET = "sensorCount";
// List of above metadata/HDF5 parameters
public static final String[] AVAILABLE_PARAMETERS = { SOURCE, TIME_DATASET,
LAT_DATASET, LON_DATASET, INTENSITY_DATASET, MSG_TYPE_DATASET,
STRIKE_TYPE_DATASET, PULSE_COUNT_DATSET, PULSE_INDEX_DATASET,
PULSE_TYPE_DATASET, HEIGHT_DATASET, SENSOR_COUNT_DATASET };
// Field values
public static final String DEFAULT_SOURCE = "NLDN";

View file

@ -73,6 +73,7 @@ import com.vividsolutions.jts.geom.GeometryFactory;
* Feb 06, 2014 2672 bsteffen Add envelope support
* Jul 07, 2014 3333 bclement now uses lightning constants
* Jul 30, 2014 3184 njensen Removed getValidIdentifiers()
* Feb 27, 2015 4181 mapeters Overrode getAvailableParameters().
*
* </pre>
*
@ -109,6 +110,11 @@ public class BinLightningAccessFactory extends AbstractDataPluginFactory {
+ " does not support location names");
}
@Override
public String[] getAvailableParameters(IDataRequest request) {
return LightningConstants.AVAILABLE_PARAMETERS;
}
/*
* (non-Javadoc)
*

View file

@ -76,6 +76,7 @@ import com.vividsolutions.jts.geom.Geometry;
* Jun 24, 2014 3170 mnash Get the accumulated time if multiple times are requested
* Jul 14, 2014 3184 njensen Overrode getAvailableLevels()
* Jul 30, 2014 3184 njensen Overrode required and optional identifiers
* Feb 27, 2015 4180 mapeters Overrode getAvailableParameters().
*
* </pre>
*
@ -359,13 +360,40 @@ public class FFMPGeometryFactory extends AbstractDataPluginFactory {
+ domain + "';";
List<Object[]> results = DatabaseQueryUtil.executeDatabaseQuery(
QUERY_MODE.MODE_SQLQUERY, sql, "metadata", "ffmp");
QUERY_MODE.MODE_SQLQUERY, sql, "metadata", PLUGIN_NAME);
for (Object[] oa : results) {
pfafList.add((String) oa[0]);
}
return pfafList.toArray(new String[pfafList.size()]);
return pfafList.toArray(new String[0]);
}
@Override
public String[] getAvailableParameters(IDataRequest request) {
StringBuilder sqlQuery = new StringBuilder("select distinct ")
.append(SOURCE_NAME).append(" from ").append(PLUGIN_NAME);
String keyWord = " where ";
for (Map.Entry<String, Object> entry : request.getIdentifiers()
.entrySet()) {
String key = entry.getKey();
String value = (String) entry.getValue();
sqlQuery.append(keyWord).append(key).append(" = '").append(value)
.append("'");
keyWord = " and ";
}
sqlQuery.append(";");
List<Object[]> results = DatabaseQueryUtil.executeDatabaseQuery(
QUERY_MODE.MODE_SQLQUERY, sqlQuery.toString(), "metadata",
PLUGIN_NAME);
List<String> params = new ArrayList<>(results.size());
for (Object[] r : results) {
params.add((String) r[0]);
}
return params.toArray(new String[0]);
}
@Override

View file

@ -25,7 +25,6 @@ import java.util.Map.Entry;
import org.geotools.coverage.grid.GridGeometry2D;
import com.raytheon.uf.common.dataaccess.IDataFactory;
import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.exception.DataRetrievalException;
import com.raytheon.uf.common.dataaccess.grid.IGridData;
@ -43,10 +42,8 @@ import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.level.MasterLevel;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.geospatial.util.SubGridGeometryCalculator;
import com.raytheon.uf.common.numeric.buffer.ByteBufferWrapper;
@ -75,6 +72,10 @@ import com.raytheon.uf.common.util.StringUtil;
* Feb 04, 2014 2672 bsteffen Enable requesting subgrids.
* Jul 30, 2014 3184 njensen Renamed valid identifiers to optional
* Feb 10, 2015 2866 nabowle Overwrite subgrid size estimation.
* Feb 26, 2015 4179 mapeters Overrode getAvailableParameters(), added
* getAvailableValues(), inherits IDataFactory.
* Feb 27, 2015 4179 mapeters Promoted getAvailableValues() to
* AbstractDataPluginFactory.
*
* </pre>
*
@ -82,8 +83,7 @@ import com.raytheon.uf.common.util.StringUtil;
* @version 1.0
*/
public class GFEGridFactory extends AbstractGridDataPluginFactory implements
IDataFactory {
public class GFEGridFactory extends AbstractGridDataPluginFactory {
public static final String MODEL_TIME = "modelTime";
@ -214,6 +214,7 @@ public class GFEGridFactory extends AbstractGridDataPluginFactory implements
* @param subGrid
* @return
*/
@Override
protected long estimateSubgridSize(GridGeometry2D gridGeom,
SubGridGeometryCalculator subGrid) {
long size = gridGeom.getGridRange().getSpan(0)
@ -284,13 +285,13 @@ public class GFEGridFactory extends AbstractGridDataPluginFactory implements
@Override
public String[] getAvailableLocationNames(IDataRequest request) {
DbQueryRequest dbRequest = buildDbQueryRequest(request);
dbRequest.addRequestField(GFEDataAccessUtil.SITE_ID);
dbRequest.setDistinct(true);
DbQueryResponse dbResonse = executeDbQueryRequest(dbRequest,
request.toString());
return getAvailableValues(request, GFEDataAccessUtil.SITE_ID,
String.class);
}
return dbResonse.getFieldObjects(GFEDataAccessUtil.SITE_ID,
@Override
public String[] getAvailableParameters(IDataRequest request) {
return getAvailableValues(request, GFEDataAccessUtil.PARM_NAME,
String.class);
}
@ -302,5 +303,4 @@ public class GFEGridFactory extends AbstractGridDataPluginFactory implements
return (GFERecord) obj;
}
}

View file

@ -20,9 +20,9 @@
package com.raytheon.uf.common.dataplugin.gfe.textproduct;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.exception.LocalizationException;
@ -44,6 +44,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* Mar 23, 2010 randerso Initial creation
* Mar 26, 2014 #2884 randerso Code clean up
* Feb 24, 2015 3978 njensen Changed to use abstract InputStream
*
* </pre>
*
@ -117,7 +118,7 @@ public class DraftProduct {
throws SerializationException {
byte[] bytes = null;
FileInputStream in = null;
InputStream in = null;
try {
File file = lf.getFile(true);
in = lf.openInputStream();

View file

@ -19,9 +19,13 @@
**/
package com.raytheon.uf.common.dataplugin.grid.dataaccess;
import java.awt.Point;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@ -29,14 +33,27 @@ import java.util.Set;
import javax.measure.unit.Unit;
import org.geotools.coverage.grid.GridEnvelope2D;
import org.geotools.coverage.grid.GridGeometry2D;
import org.geotools.geometry.DirectPosition2D;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.exception.DataRetrievalException;
import com.raytheon.uf.common.dataaccess.exception.EnvelopeProjectionException;
import com.raytheon.uf.common.dataaccess.exception.IncompatibleRequestException;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
import com.raytheon.uf.common.dataaccess.grid.IGridData;
import com.raytheon.uf.common.dataaccess.impl.AbstractGridDataPluginFactory;
import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData;
import com.raytheon.uf.common.dataaccess.impl.DefaultGridData;
import com.raytheon.uf.common.dataaccess.util.PDOUtil;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
@ -44,13 +61,22 @@ import com.raytheon.uf.common.dataplugin.grid.dataquery.GridQueryAssembler;
import com.raytheon.uf.common.dataplugin.grid.mapping.DatasetIdMapper;
import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.level.mapping.LevelMapper;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.geospatial.util.SubGridGeometryCalculator;
import com.raytheon.uf.common.gridcoverage.GridCoverage;
import com.raytheon.uf.common.numeric.source.DataSource;
import com.raytheon.uf.common.parameter.Parameter;
import com.raytheon.uf.common.parameter.mapping.ParameterMapper;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.common.util.mapping.Mapper;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.GeometryFactory;
/**
* Data access factory for accessing data from the Grid plugin as grid types.
@ -70,7 +96,8 @@ import com.raytheon.uf.common.util.mapping.Mapper;
* Oct 16, 2014 3598 nabowle Accept level identifiers.
* Oct 21, 2014 3755 nabowle Add getAvailable levels and parameters.
* Feb 13, 2015 4124 mapeters Inherits IDataFactory.
*
* Feb 23, 2015 4016 bsteffen Add support for geometry requests.
* Feb 27, 2015 4179 mapeters Use AbstractDataPluginFactory.getAvailableValues().
*
* </pre>
*
@ -320,7 +347,8 @@ public class GridDataAccessFactory extends AbstractGridDataPluginFactory {
@Override
public String[] getAvailableLocationNames(IDataRequest request) {
return getAvailableLocationNames(request, GridConstants.DATASET_ID);
return getAvailableValues(request, GridConstants.DATASET_ID,
String.class);
}
/**
@ -328,14 +356,7 @@ public class GridDataAccessFactory extends AbstractGridDataPluginFactory {
*/
@Override
public Level[] getAvailableLevels(IDataRequest request) {
DbQueryRequest dbQueryRequest = buildDbQueryRequest(request);
dbQueryRequest.setDistinct(Boolean.TRUE);
dbQueryRequest.addRequestField(GridConstants.LEVEL);
DbQueryResponse dbQueryResponse = this.executeDbQueryRequest(
dbQueryRequest, request.toString());
return dbQueryResponse
.getFieldObjects(GridConstants.LEVEL, Level.class);
return getAvailableValues(request, GridConstants.LEVEL, Level.class);
}
/**
@ -343,13 +364,297 @@ public class GridDataAccessFactory extends AbstractGridDataPluginFactory {
*/
@Override
public String[] getAvailableParameters(IDataRequest request) {
DbQueryRequest dbQueryRequest = buildDbQueryRequest(request);
dbQueryRequest.setDistinct(Boolean.TRUE);
dbQueryRequest.addRequestField(GridConstants.PARAMETER_ABBREVIATION);
DbQueryResponse dbQueryResponse = this.executeDbQueryRequest(
dbQueryRequest, request.toString());
return dbQueryResponse.getFieldObjects(
return getAvailableValues(request,
GridConstants.PARAMETER_ABBREVIATION, String.class);
}
/**
* Return geometry data for grid records. Each grid cell is treated as a
* single Point Geometry. An envelope must be provided.
*/
@Override
protected IGeometryData[] getGeometryData(IDataRequest request,
DbQueryResponse dbQueryResponse) {
if (request.getEnvelope() == null) {
throw new IncompatibleRequestException(
"Requests for Grid data as a geometry must provide a bounding envelope.");
}
Map<GridGeometryKey, Set<GridRecord>> sortedRecords = new HashMap<>();
for (GridRecord record : dbQueryResponse
.getEntityObjects(GridRecord.class)) {
GridGeometryKey key = new GridGeometryKey(record);
Set<GridRecord> records = sortedRecords.get(key);
if (records == null) {
records = new HashSet<>();
sortedRecords.put(key, records);
}
records.add(record);
}
List<IGeometryData> result = new ArrayList<>(
dbQueryResponse.getNumResults());
for (Entry<GridGeometryKey, Set<GridRecord>> entry : sortedRecords
.entrySet()) {
result.addAll(getGeometryData(request, entry.getKey(),
entry.getValue()));
}
return result.toArray(new IGeometryData[0]);
}
/**
* Get geometry data for many related records. The result will be a single
* {@link IGeometryData} for each grid cell in the request envelope.
* Multiple records will be represented as seperate paramters within each
* IGeometryData.
*/
private Collection<IGeometryData> getGeometryData(IDataRequest request,
GridGeometryKey key, Set<GridRecord> records) {
ReferencedEnvelope requestEnv = new ReferencedEnvelope(
request.getEnvelope(), DefaultGeographicCRS.WGS84);
Point point = findRequestPoint(key.getGridCoverage(),
request.getEnvelope());
if (point != null) {
return Collections.singleton(getGeometryData(point, key, records));
} else {
SubGridGeometryCalculator subGrid = calculateSubGrid(requestEnv,
key.getGridCoverage().getGridGeometry());
if (subGrid != null && !subGrid.isEmpty()) {
return getGeometryData(subGrid, key, records);
} else {
return Collections.emptyList();
}
}
}
/**
* Get geometry data for a single point.
*/
private IGeometryData getGeometryData(Point point, GridGeometryKey key,
Set<GridRecord> records) {
DefaultGeometryData data = key.toGeometryData();
DirectPosition2D llPoint = findResponsePoint(key.getGridCoverage(),
point.x, point.y);
data.setGeometry(new GeometryFactory().createPoint(new Coordinate(
llPoint.x, llPoint.y)));
data.setLocationName(data.getLocationName() + "-" + point.x + ","
+ point.y);
Request request = Request.buildPointRequest(point);
populateGeometryData(records, request,
new DefaultGeometryData[] { data });
return data;
}
/**
* Get geometry data for multiple points within a subgrid that has been
* calculated from a request envelope.
*/
private Collection<IGeometryData> getGeometryData(
SubGridGeometryCalculator subGrid, GridGeometryKey key,
Set<GridRecord> records) {
GridEnvelope2D gridRange = subGrid.getSubGridGeometry2D()
.getGridRange2D();
DefaultGeometryData[] data = new DefaultGeometryData[(int) (gridRange
.getWidth() * gridRange.getHeight())];
GeometryFactory geometryFactory = new GeometryFactory();
int index = 0;
for (int y = (int) gridRange.getMinY(); y < gridRange.getMaxY(); y += 1) {
for (int x = (int) gridRange.getMinX(); x < gridRange.getMaxX(); x += 1) {
data[index] = key.toGeometryData();
DirectPosition2D llPoint = findResponsePoint(
key.getGridCoverage(), x, y);
data[index].setGeometry(geometryFactory
.createPoint(new Coordinate(llPoint.x, llPoint.y)));
data[index].setLocationName(data[index].getLocationName() + "-"
+ x + "," + y);
index += 1;
}
}
Request request = Request.buildSlab(subGrid.getGridRangeLow(true),
subGrid.getGridRangeHigh(false));
populateGeometryData(records, request, data);
return Arrays.<IGeometryData> asList(data);
}
/**
 * Populate one or more DefaultGeometryData by requesting data for the
 * specified request for each grid record. Each GridRecord is added to each
 * DefaultGeometryData as a separate parameter. The order of the
 * DefaultGeometryDatas must be the same as the order of the data returned
 * from the provided request.
 *
 * @throws DataRetrievalException
 *             if the raw data cannot be retrieved, is not a
 *             FloatDataRecord, or its length does not match data.length
 */
private static void populateGeometryData(Set<GridRecord> records,
        Request request, DefaultGeometryData[] data) {
    for (GridRecord record : records) {
        IDataRecord dataRecord;
        try {
            dataRecord = PDOUtil.getDataRecord(record, "Data", request);
        } catch (Exception e) {
            /*
             * Only wrap the actual retrieval failure; the validation
             * exceptions below are already DataRetrievalExceptions with
             * accurate messages and must not be re-wrapped behind a
             * misleading "failed to retrieve" message.
             */
            throw new DataRetrievalException(
                    "Failed to retrieve the IDataRecord for GridRecord: "
                            + record.toString(), e);
        }
        if (!(dataRecord instanceof FloatDataRecord)) {
            String type = dataRecord == null ? "null" : dataRecord
                    .getClass().getSimpleName();
            throw new DataRetrievalException("Unexpected record type("
                    + type + ") for " + record);
        }
        float[] rawArray = ((FloatDataRecord) dataRecord).getFloatData();
        if (rawArray.length != data.length) {
            throw new DataRetrievalException("Unexpected response of size "
                    + rawArray.length + " when expected size is "
                    + data.length + " for record " + record);
        }
        Parameter parameter = record.getParameter();
        for (int i = 0; i < data.length; i += 1) {
            data[i].addData(parameter.getAbbreviation(), rawArray[i],
                    parameter.getUnit());
        }
    }
}
/**
 * After a grid point has been requested, this will determine the Geometry
 * for that point: the grid cell coordinate is transformed into the
 * coverage CRS and then into WGS84 lon/lat.
 *
 * @throws EnvelopeProjectionException
 *             if either transform cannot be created or applied
 */
private DirectPosition2D findResponsePoint(GridCoverage coverage, int x,
        int y) {
    try {
        MathTransform grid2crs = coverage.getGridGeometry().getGridToCRS();
        MathTransform crs2ll = CRS.findMathTransform(coverage.getCrs(),
                DefaultGeographicCRS.WGS84, true);
        DirectPosition2D point = new DirectPosition2D(x, y);
        grid2crs.transform(point, point);
        crs2ll.transform(point, point);
        return point;
    } catch (TransformException | FactoryException e) {
        /*
         * Previous message ("Error determining point from envelope") was a
         * copy-paste from findRequestPoint; this method has no envelope.
         */
        throw new EnvelopeProjectionException(
                "Error determining lon/lat for grid point: ", e);
    }
}
/**
 * Find a single point(grid cell) to request. This will return a result only
 * if the envelope is so small that it is entirely within a single grid cell
 * and it overlaps the area of the coverage. It tests for this by converting
 * the upper left and lower right corners of the envelope into grid space
 * and returning the point only if both convert to the same point.
 */
private Point findRequestPoint(GridCoverage coverage, Envelope envelope) {
    try {
        MathTransform ll2crs = CRS.findMathTransform(
                DefaultGeographicCRS.WGS84, coverage.getCrs(), true);
        MathTransform crs2grid = coverage.getGridGeometry()
                .getCRSToGrid2D();
        /* Two corners of the envelope, packed as (x0, y0, x1, y1). */
        double[] corners = { envelope.getMinX(), envelope.getMinY(),
                envelope.getMaxX(), envelope.getMaxY() };
        ll2crs.transform(corners, 0, corners, 0, 2);
        crs2grid.transform(corners, 0, corners, 0, 2);
        int x1 = (int) Math.round(corners[0]);
        int y1 = (int) Math.round(corners[1]);
        int x2 = (int) Math.round(corners[2]);
        int y2 = (int) Math.round(corners[3]);
        if (x1 != x2 || y1 != y2) {
            /* Envelope spans more than one grid cell. */
            return null;
        }
        GridEnvelope2D gridRange = coverage.getGridGeometry()
                .getGridRange2D();
        return gridRange.contains(x1, y1) ? new Point(x1, y1) : null;
    } catch (TransformException | FactoryException e) {
        throw new EnvelopeProjectionException(
                "Error determining point from envelope: ", e);
    }
}
/**
 * A class for organizing {@link GridRecord}s into groups which can be
 * combined into a single {@link IGeometryData} object. Instances are
 * immutable; equality and hashing are derived from the level, data time,
 * dataset id, and coverage of the source record.
 */
private static final class GridGeometryKey {

    private final Level level;

    private final DataTime dataTime;

    private final String datasetId;

    private final GridCoverage coverage;

    /* Precomputed once since all fields are final. */
    private final int hashCode;

    public GridGeometryKey(GridRecord record) {
        this.level = record.getLevel();
        this.dataTime = record.getDataTime();
        this.datasetId = record.getDatasetId();
        this.coverage = record.getLocation();
        final int prime = 31;
        int hash = 1;
        /* Field order must not change or cached hashes would differ. */
        hash = prime * hash + hashOf(coverage);
        hash = prime * hash + hashOf(dataTime);
        hash = prime * hash + hashOf(datasetId);
        hash = prime * hash + hashOf(level);
        this.hashCode = hash;
    }

    /** Null-safe hash of a single field. */
    private static int hashOf(Object field) {
        return field == null ? 0 : field.hashCode();
    }

    /** Null-safe equality of two field values. */
    private static boolean fieldsEqual(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    public DefaultGeometryData toGeometryData() {
        DefaultGeometryData geometryData = new DefaultGeometryData();
        geometryData.setLevel(level);
        geometryData.setDataTime(dataTime);
        geometryData.setLocationName(datasetId);
        return geometryData;
    }

    public GridCoverage getGridCoverage() {
        return coverage;
    }

    @Override
    public int hashCode() {
        return this.hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        GridGeometryKey other = (GridGeometryKey) obj;
        return fieldsEqual(coverage, other.coverage)
                && fieldsEqual(dataTime, other.dataTime)
                && fieldsEqual(datasetId, other.datasetId)
                && fieldsEqual(level, other.level);
    }
}
}

View file

@ -91,6 +91,7 @@ import com.vividsolutions.jts.geom.Envelope;
* fields are used for the level one and two
* values.
* Feb 13, 2015 4124 mapeters Inherits IDataFactory.
* Feb 27, 2015 4179 mapeters Use AbstractDataPluginFactory.getAvailableValues().
*
* </pre>
*
@ -503,7 +504,7 @@ public class RadarGridFactory extends AbstractGridDataPluginFactory {
@Override
public String[] getAvailableLocationNames(IDataRequest request) {
return getAvailableLocationNames(request, ICAO);
return getAvailableValues(request, ICAO, String.class);
}
/**

View file

@ -37,9 +37,7 @@ import com.raytheon.uf.common.dataaccess.impl.DefaultGridData;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.dataplugin.satellite.units.SatelliteUnits;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.numeric.source.DataSource;
/**
@ -60,6 +58,7 @@ import com.raytheon.uf.common.numeric.source.DataSource;
* Jul 30, 2014 3184 njensen Renamed valid identifiers to optional
* Sep 29, 2014 3596 nabowle Always put creatingEntity in attributes.
* Feb 13, 2015 4124 mapeters Overrode getAvailableParameters(), inherits IDataFactory.
* Feb 27, 2015 4179 mapeters Use AbstractDataPluginFactory.getAvailableValues().
*
* </pre>
*
@ -170,7 +169,7 @@ public class SatelliteGridFactory extends AbstractGridDataPluginFactory {
@Override
public String[] getAvailableLocationNames(IDataRequest request) {
return getAvailableLocationNames(request, FIELD_SECTOR_ID);
return getAvailableValues(request, FIELD_SECTOR_ID, String.class);
}
/**
@ -178,14 +177,7 @@ public class SatelliteGridFactory extends AbstractGridDataPluginFactory {
*/
@Override
public String[] getAvailableParameters(IDataRequest request) {
DbQueryRequest dbQueryRequest = buildDbQueryRequest(request);
dbQueryRequest.setDistinct(Boolean.TRUE);
dbQueryRequest.addRequestField(FIELD_PYHSICAL_ELEMENT);
DbQueryResponse dbQueryResponse = this.executeDbQueryRequest(
dbQueryRequest, request.toString());
return dbQueryResponse.getFieldObjects(FIELD_PYHSICAL_ELEMENT,
String.class);
return getAvailableValues(request, FIELD_PYHSICAL_ELEMENT, String.class);
}
}

View file

@ -19,6 +19,8 @@
**/
package com.raytheon.uf.common.dataplugin.warning.gis;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -65,6 +67,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
* Oct 22, 2013 2361 njensen Use JAXBManager for XML
* Jun 17, 2014 DR 17390 Qinglu Lin Updated getMetaDataMap() for lonField and latField.
* Aug 21, 2014 3353 rferrel Generating Geo Spatial data set no longer on the UI thread.
* Feb 24, 2015 3978 njensen Use openInputStream() for reading file contents
*
* </pre>
*
@ -292,9 +295,13 @@ public class GeospatialFactory {
+ fileName);
if (lf.exists()) {
byte[] data = lf.read();
try (InputStream is = lf.openInputStream()) {
return SerializationUtil.transformFromThrift(
GeospatialDataSet.class, data);
GeospatialDataSet.class, is);
} catch (IOException e) {
throw new SerializationException("Error reading file "
+ lf.getName(), e);
}
} else {
System.out.println("Attempted to load: " + lf.getName()
+ " for site " + site + ", but file does not exist.");

View file

@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Ohd Plug-in
Bundle-SymbolicName: com.raytheon.uf.edex.ohd
Bundle-Version: 1.14.0.qualifier
Bundle-Version: 1.15.0.qualifier
Bundle-Vendor: RAYTHEON
Bundle-RequiredExecutionEnvironment: JavaSE-1.7
Import-Package: com.raytheon.edex.util,

View file

@ -22,11 +22,14 @@ package com.raytheon.uf.edex.ohd.pproc;
import java.io.File;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
import org.hibernate.HibernateException;
import org.opengis.metadata.spatial.PixelOrientation;
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.LightningConstants;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataquery.db.QueryResult;
@ -60,6 +63,9 @@ import com.vividsolutions.jts.geom.Coordinate;
* Aug 20, 2014 3549 njensen Fixed spelling in exceptions
* Sep 17, 2014 3015 bclement improved exception handling
* Dec 04, 2014 3015 njensen Corrected usage of Coordinate(x, y)
* Feb 25, 2015 3992 nabowle Limit getMostRecentStrikes to NLDN.
* Deduplicate lightning data in a
* single BinLightningRecord.
*
* </pre>
*
@ -97,7 +103,8 @@ public class MpeLightningSrv {
QueryResult rs = null;
CoreDao coreDao = new CoreDao(DaoConfig.DEFAULT);
final String lgtSQL = "select datauri from binlightning "
+ "where reftime > (now()- interval \'30 minutes \')";
+ "where reftime > (now()- interval \'30 minutes \')"
+ "and source = '" + LightningConstants.DEFAULT_SOURCE + "'";
try {
rs = (QueryResult) coreDao.executeNativeSql(lgtSQL, true);
} catch (Exception e) {
@ -143,40 +150,62 @@ public class MpeLightningSrv {
float[] longitudes = ltngRec.getLongitudes();
long[] obstimes = ltngRec.getObsTimes();
byte[] strikes = ltngRec.getPulseCounts();
int[] intensities = ltngRec.getIntensities();
Map<LightningData, Integer> lightningData = new HashMap<>();
// convert latitude and longitude to grid coordinate
HRAP hrap = HRAP.getInstance();
PixelOrientation po = PixelOrientation.CENTER;
short[] x_hgrids = new short[latitudes.length];
short[] y_hgrids = new short[longitudes.length];
Coordinate gridCell = new Coordinate();
LightningData data;
Integer oldIntensity;
for (int i = 0; i < latitudes.length; i++) {
float lat = latitudes[i];
float lon = longitudes[i];
Coordinate c = new Coordinate(lon, lat);
gridCell = hrap.latLonToGridCoordinate(c, po);
x_hgrids[i] = (short) gridCell.x;
y_hgrids[i] = (short) gridCell.y;
data = new LightningData((short) gridCell.x,
(short) gridCell.y, obstimes[i], strikes[i]);
// deduplicate lightning data that will create the same
// primary keys.
oldIntensity = lightningData.get(data);
if (oldIntensity == null) {
lightningData.put(data, intensities[i]);
} else {
logger.debug("dataURI " + dataURI
+ " has multiple lightning data for "
+ "ihfs.Lightning pk (" + data.getX() + ", "
+ data.getY() + ", " + data.getObstime() + ")");
if (intensities[i] > oldIntensity.intValue()) {
/*
* highest intensity data is retained. #put() does not
* replace keys, so because only some of the fields are
* used for hashcode and equals, we must remove the old
* key before putting the new key.
*/
lightningData.remove(data);
lightningData.put(data, intensities[i]);
}
}
}
// set up query first
StringBuilder sql = new StringBuilder("INSERT INTO lightning "
+ " (x_hgrid, y_hgrid, obstime, no_of_strike) VALUES");
// form tuples
for (int j = 0; j < x_hgrids.length; j++) {
for (LightningData lightning : lightningData.keySet()) {
// need to convert obstime from seconds
// to timestamp type for table insertion
Timestamp ts = new Timestamp(obstimes[j]);
String tuple = "(" + x_hgrids[j] + "," + y_hgrids[j]
+ ", TIMESTAMP '" + ts.toString() + "' ," + strikes[j]
+ ")";
if (j != x_hgrids.length - 1) {
Timestamp ts = new Timestamp(lightning.getObstime());
String tuple = "(" + lightning.getX() + "," + lightning.getY()
+ ", TIMESTAMP '" + ts.toString() + "' ,"
+ lightning.getStrikes() + ")";
tuple = tuple + ",";
} else {
tuple = tuple + ";";
}
sql.append(tuple);
}
sql.replace(sql.length() - 1, sql.length(), ";");
// insert all the tuples into the 'lightning' table in ihfs
// database.
@ -229,4 +258,98 @@ public class MpeLightningSrv {
QueryResultRow[] rows = getMostRecentStrikes();
ifhsInsertMostRecentStrikes(rows);
}
/**
* Class to simplify deduplicating lightning data in a
* {@link BinLightningRecord} that generate the same ihfs lightning primary
* key.
*/
private static class LightningData {
private short x;
private short y;
private long obstime;
private byte strikes;
public LightningData(short x, short y, long time, byte strikes) {
super();
this.x = x;
this.y = y;
this.obstime = time;
this.strikes = strikes;
}
/**
* @return the x
*/
public short getX() {
return x;
}
/**
* @return the y
*/
public short getY() {
return y;
}
/**
* @return the obstime
*/
public long getObstime() {
return obstime;
}
/**
* @return the strikes
*/
public byte getStrikes() {
return strikes;
}
/**
* Generate a hashcode using the ihfs primary key fields: x, y, and
* time.
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (obstime ^ (obstime >>> 32));
result = prime * result + x;
result = prime * result + y;
return result;
}
/**
* Determine equality using the ihfs primary key fields: x, y, and time.
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
LightningData other = (LightningData) obj;
if (obstime != other.obstime) {
return false;
}
if (x != other.x) {
return false;
}
if (y != other.y) {
return false;
}
return true;
}
}
}

View file

@ -23,7 +23,7 @@ Require-Bundle: com.raytheon.uf.edex.cpgsrv;bundle-version="1.11.7";resolution:=
Import-Package: com.raytheon.uf.common.dataplugin.grid,
com.raytheon.uf.common.ohd,
com.raytheon.uf.common.status,
com.raytheon.uf.edex.dat.utils;resolution:=optional,
com.raytheon.uf.edex.dat.utils,
com.raytheon.uf.edex.plugin.scan.common,
org.apache.commons.logging
Bundle-RequiredExecutionEnvironment: JavaSE-1.7

View file

@ -97,6 +97,14 @@
<permission id="com.raytheon.localization.site/common_static/mixedCase"/>
<permission id="com.raytheon.localization.site/common_static/hazardServices"/>
<permission id="com.raytheon.localization.site/common_static/hazardServices/settings"/>
<!-- TODO Localization permissions should be able to be plugin contributed.
Improve code in the generically [poorly] named FileManager to support
that concept and then split them. -->
<permission id="com.raytheon.localization.site/common_static/damagepath"/>
<user userId="ALL">
<userPermission>com.raytheon.localization.site/common_static/purge</userPermission>
<userPermission>com.raytheon.localization.site/common_static/colormaps</userPermission>
@ -131,6 +139,9 @@
<userPermission>com.raytheon.localization.site/common_static/archiver/purger</userPermission>
<userPermission>com.raytheon.localization.site/common_static/archiver/purger/retention</userPermission>
<userPermission>com.raytheon.localization.site/common_static/archiver/purger/case</userPermission>
<userPermission>com.raytheon.localization.site/common_static/damagepath</userPermission>
<userPermission>com.raytheon.localization.site/common_static/hazardServices</userPermission>
<userPermission>com.raytheon.localization.site/common_static/hazardServices/settings</userPermission>
</user>
</nwsRoleData>

View file

@ -7,18 +7,22 @@
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<endpoint id="webServiceEndpoint"
uri="jetty:http://0.0.0.0:${ebxml.thrift.service.port}${HTTP_SERVER_PATH}?${ebxml.thrift.service.properties}" />
<propertyPlaceholder id="ebxmlThriftServiceProperties"
location="classpath:com.raytheon.uf.edex.registry.ebxml.properties"/>
<route id="webServiceRoute" streamCache="false">
<from uri="webServiceEndpoint" />
<setBody>
<simple>
bodyAs(javax.servlet.http.HttpServletRequest)
</simple>
</setBody>
<bean ref="httpServiceExecutor" method="execute(${body}, ${in.header.CamelHttpServletResponse})"/>
</route>
<restConfiguration component="jetty" host="0.0.0.0" port="{{ebxml.thrift.service.port}}" >
<endpointProperty key="disableStreamCache" value="{{ebxml.thrift.service.disableStreamCache}}" />
<endpointProperty key="chunked" value="{{ebxml.thrift.service.chunked}}" />
</restConfiguration>
<rest path="{{env:HTTP_SERVER_PATH}}">
<post uri="/{format}">
<to uri="bean:httpServiceExecutor?method=execute(${body}, ${header.format}, ${in.header.accept-encoding}, ${in.header.CamelHttpServletResponse})" />
</post>
<post uri="/">
<to uri="bean:httpServiceExecutor?method=execute(${body}, ${in.header.content-type}, ${in.header.accept-encoding}, ${in.header.CamelHttpServletResponse})" />
</post>
</rest>
</camelContext>
</beans>

View file

@ -16,7 +16,8 @@ ebxml-notification-batch-size=200
ebxml-webserver-max-threads=8
# EBXML thrift service port
ebxml.thrift.service.port=9588
ebxml.thrift.service.properties=disableStreamCache=true&chunked=false&httpBindingRef=#binaryHttpBinding&matchOnUriPrefix=true
ebxml.thrift.service.disableStreamCache=true
ebxml.thrift.service.chunked=false
# EBXML webserver port
ebxml.registry.webserver.port=8082
# EBXML registry host

View file

@ -16,7 +16,15 @@
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
# File auto-generated against equivalent DynamicSerialize Java class
# File auto-generated against equivalent DynamicSerialize Java class and
# modified.
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 2015-02-27 4174 nabowle Output full stacktrace.
#
class SerializableExceptionWrapper(object):
@ -35,6 +43,9 @@ class SerializableExceptionWrapper(object):
retVal = "" + self.exceptionClass + " exception thrown: " + self.message + "\n"
for element in self.stackTrace:
retVal += "\tat " + str(element) + "\n"
if self.wrapper:
retVal += "Caused by: " + self.wrapper.__repr__()
return retVal
def getStackTrace(self):