Diffstat (limited to 'datarouter-prov/src/main/resources')
10 files changed, 737 insertions, 737 deletions
diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt b/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
index 03eabb7d..21000d0a 100644
--- a/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
@@ -1,44 +1,44 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-{
- "name": "Jettydemo",
- "version": "m1.0",
- "description": "Jettydemo",
- "business_description": "Jettydemo",
- "suspend": false,
- "deleted": false,
- "changeowner": true,
- "authorization": {
- "classification": "unclassified",
- "endpoint_addrs": [
- "172.100.0.3",
- ],
- "endpoint_ids": [
- {
- "password": "rs873m",
- "id": "rs873m"
- }
- ]
- },
-}
-
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+{
+    "name": "Jettydemo",
+    "version": "m1.0",
+    "description": "Jettydemo",
+    "business_description": "Jettydemo",
+    "suspend": false,
+    "deleted": false,
+    "changeowner": true,
+    "authorization": {
+        "classification": "unclassified",
+        "endpoint_addrs": [
+            "172.100.0.3",
+        ],
+        "endpoint_ids": [
+            {
+                "password": "rs873m",
+                "id": "rs873m"
+            }
+        ]
+    },
+}
+
diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt b/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
index 15ca3095..45e12732 100644
--- a/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
@@ -1,36 +1,36 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-{
- "delivery" :
-
- {
- "url" : "http://172.100.0.3:7070/",
- "user" : "datarouter",
- "password" : "datarouter",
- "use100" : true
- },
- "metadataOnly" : false,
- "suspend" : false,
- "groupid" : 29,
- "subscriber" : "sg481n"
-}
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+{
+    "delivery" :
+
+    {
+        "url" : "http://172.100.0.3:7070/",
+        "user" : "datarouter",
+        "password" : "datarouter",
+        "use100" : true
+    },
+    "metadataOnly" : false,
+    "suspend" : false,
+    "groupid" : 29,
+    "subscriber" : "sg481n"
+}
diff --git a/datarouter-prov/src/main/resources/logback.xml b/datarouter-prov/src/main/resources/logback.xml
index a9655154..e3d17128 100644
--- a/datarouter-prov/src/main/resources/logback.xml
+++ b/datarouter-prov/src/main/resources/logback.xml
@@ -1,405 +1,405 @@
-<!--
- ============LICENSE_START==================================================
- * org.onap.dmaap
- * ===========================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ===========================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END====================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
--->
-<configuration scan="true" scanPeriod="3 seconds" debug="true">
- <!--<jmxConfigurator /> -->
- <!-- directory path for all other type logs -->
- <!-- property name="logDir" value="/home/eby/dr2/logs" / -->
- <property name="logDir" value="/opt/app/datartr/logs" />
-
- <!-- directory path for debugging type logs -->
- <!-- property name="debugDir" value="/home/eby/dr2/debug-logs" /-->
-
- <!-- specify the component name
- <ECOMP-component-name>::= "MSO" | "DCAE" | "ASDC " | "AAI" |"Policy" | "SDNC" | "AC" -->
- <!-- This creates the MSO directory in in the LogDir which is not needed, mentioned last directory of the path-->
- <!-- property name="componentName" value="logs"></property -->
-
- <!-- log file names -->
- <property name="generalLogName" value="apicalls" />
- <!-- name="securityLogName" value="security" -->
- <!-- name="performanceLogName" value="performance" -->
- <!-- name="serverLogName" value="server" -->
- <!-- name="policyLogName" value="policy"-->
- <property name="errorLogName" value="errors" />
- <!-- name="metricsLogName" value="metrics" -->
- <!-- name="auditLogName" value="audit" -->
- <!-- name="debugLogName" value="debug" -->
- <property name="jettyLogName" value="jetty"></property>
- <property name="defaultPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|%msg%n" />
- <property name="jettyLoggerPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%thread|%.-5level|%msg%n" />
-
- <property name="debugLoggerPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|[%caller{3}]|%msg%n" />
-
- <property name="logDirectory" value="${logDir}" />
- <!-- property name="debugLogDirectory" value="${debugDir}/${componentName}" /-->
-
-
- <!-- Example evaluator filter applied against console appender -->
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
-
- <!-- ============================================================================ -->
- <!-- EELF Appenders -->
- <!-- ============================================================================ -->
-
- <!-- The EELFAppender is used to record events to the general application
- log -->
-
-
- <appender name="EELF"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${generalLogName}.log</file>
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${generalLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncEELF" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELF" />
- </appender>
-
- <!-- EELF Security Appender. This appender is used to record security events
- to the security log file. Security events are separate from other loggers
- in EELF so that security log records can be captured and managed in a secure
- way separate from the other logs. This appender is set to never discard any
- events. -->
- <!--appender name="EELFSecurity"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${securityLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${securityLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncEELFSecurity" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <discardingThreshold>0</discardingThreshold>
- <appender-ref ref="EELFSecurity" />
- </appender-->
-
- <!-- EELF Performance Appender. This appender is used to record performance
- records. -->
- <!--appender name="EELFPerformance"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${performanceLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${performanceLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <outputPatternAsHeader>true</outputPatternAsHeader>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncEELFPerformance" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFPerformance" />
- </appender-->
-
- <!-- EELF Server Appender. This appender is used to record Server related
- logging events. The Server logger and appender are specializations of the
- EELF application root logger and appender. This can be used to segregate Server
- events from other components, or it can be eliminated to record these events
- as part of the application root log. -->
- <!--appender name="EELFServer"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${serverLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${serverLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncEELFServer" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFServer" />
- </appender-->
-
-
- <!-- EELF Policy Appender. This appender is used to record Policy engine
- related logging events. The Policy logger and appender are specializations
- of the EELF application root logger and appender. This can be used to segregate
- Policy engine events from other components, or it can be eliminated to record
- these events as part of the application root log. -->
- <!--appender name="EELFPolicy"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${policyLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${policyLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncEELFPolicy" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFPolicy" >
- </appender-->
-
-
- <!-- EELF Audit Appender. This appender is used to record audit engine
- related logging events. The audit logger and appender are specializations
- of the EELF application root logger and appender. This can be used to segregate
- Policy engine events from other components, or it can be eliminated to record
- these events as part of the application root log. -->
-
- <!--appender name="EELFAudit"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${auditLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${auditLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncEELFAudit" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFAudit" />
- </appender-->
-
-<!--appender name="EELFMetrics"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${metricsLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${metricsLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder-->
- <!-- <pattern>"%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} -
- %msg%n"</pattern> -->
- <!--pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
-
-
- <appender name="asyncEELFMetrics" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFMetrics"/>
- </appender-->
-
- <appender name="EELFError"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${errorLogName}.log</file>
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>ERROR</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${errorLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncEELFError" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFError"/>
- </appender>
-
- <!-- ============================================================================ -->
- <appender name="jettylog"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${jettyLogName}.log</file>
- <filter class="org.onap.dmaap.datarouter.provisioning.eelf.JettyFilter" />
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${jettyLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${jettyLoggerPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncEELFjettylog" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="jettylog" />
- <includeCallerData>true</includeCallerData>
- </appender>
-
- <!-- ============================================================================ -->
-
-
- <!--appender name="EELFDebug"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${debugLogDirectory}/${debugLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${debugLogDirectory}/${debugLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${debugLoggerPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncEELFDebug" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFDebug" />
- <includeCallerData>true</includeCallerData>
- </appender-->
-
-
- <!-- ============================================================================ -->
- <!-- EELF loggers -->
- <!-- ============================================================================ -->
- <logger name="com.att.eelf" level="info" additivity="false">
- <appender-ref ref="asyncEELF" />
- </logger>
-
- <logger name="com.att.eelf.error" level="error" additivity="false">
- <appender-ref ref="asyncEELFError" />
- </logger>
-
- <logger name="log4j.logger.org.eclipse.jetty" additivity="false" level="info">
- <appender-ref ref="asyncEELFjettylog"/>
- </logger>
-
- <!-- logger name="com.att.eelf.security" level="info" additivity="false">
- <appender-ref ref="asyncEELFSecurity" />
- </logger>
- <logger name="com.att.eelf.perf" level="info" additivity="false">
- <appender-ref ref="asyncEELFPerformance" />
- </logger>
- <logger name="com.att.eelf.server" level="info" additivity="false">
- <appender-ref ref="asyncEELFServer" />
- </logger>
- <logger name="com.att.eelf.policy" level="info" additivity="false">
- <appender-ref ref="asyncEELFPolicy" />
- </logger>
-
- <logger name="com.att.eelf.audit" level="info" additivity="false">
- <appender-ref ref="asyncEELFAudit" />
- </logger>
-
- <logger name="com.att.eelf.metrics" level="info" additivity="false">
- <appender-ref ref="asyncEELFMetrics" />
- </logger>
-
- <logger name="com.att.eelf.debug" level="debug" additivity="false">
- <appender-ref ref="asyncEELFDebug" />
- </logger-->
-
-
-
-
- <root level="INFO">
- <appender-ref ref="asyncEELF" />
- <appender-ref ref="asyncEELFError" />
- <appender-ref ref="asyncEELFjettylog" />
- </root>
-
-</configuration>
diff --git a/datarouter-prov/src/main/resources/misc/doaction b/datarouter-prov/src/main/resources/misc/doaction
index 8e719d4e..c5692901 100644
--- a/datarouter-prov/src/main/resources/misc/doaction
+++ b/datarouter-prov/src/main/resources/misc/doaction
@@ -24,49 +24,49 @@
diff --git a/datarouter-prov/src/main/resources/misc/dr-route b/datarouter-prov/src/main/resources/misc/dr-route
index 56d7766c..307614f7 100644
--- a/datarouter-prov/src/main/resources/misc/dr-route
+++ b/datarouter-prov/src/main/resources/misc/dr-route
@@ -28,5 +28,5 @@
 CLASSPATH=`echo /opt/app/datartr/etc /opt/app/datartr/lib/*.jar | tr ' ' ':'`
 export CLASSPATH JAVA_HOME JAVA_OPTS TZ PATH
 $JAVA_HOME/bin/java \
-	-Dlog4j.configuration=file:///opt/app/datartr/etc/log4j.drroute.properties \
-	org.onap.dmaap.datarouter.provisioning.utils.DRRouteCLI $*
+    -Dlog4j.configuration=file:///opt/app/datartr/etc/log4j.drroute.properties \
+    org.onap.dmaap.datarouter.provisioning.utils.DRRouteCLI $*
diff --git a/datarouter-prov/src/main/resources/misc/drtrprov b/datarouter-prov/src/main/resources/misc/drtrprov
index 9f86c23b..e764c145 100644
--- a/datarouter-prov/src/main/resources/misc/drtrprov
+++ b/datarouter-prov/src/main/resources/misc/drtrprov
@@ -30,108 +30,108 @@
diff --git a/datarouter-prov/src/main/resources/misc/havecert.tmpl b/datarouter-prov/src/main/resources/misc/havecert.tmpl
index e2389868..d4968016 100644
--- a/datarouter-prov/src/main/resources/misc/havecert.tmpl
+++ b/datarouter-prov/src/main/resources/misc/havecert.tmpl
@@ -4,7 +4,7 @@ TZ=GMT0
 cd /opt/app/datartr;
 if [ -f ${DRTR_PROV_KSTOREFILE:-etc/keystore} ]
 then
-	exit 0
+    exit 0
 fi
 echo `date '+%F %T,000'` WARN Certificate file "${DRTR_PROV_KSTOREFILE:-etc/keystore}" is missing >>${DRTR_PROV_LOGS:-logs}/provint.log
 exit 1
diff --git a/datarouter-prov/src/main/resources/misc/notes b/datarouter-prov/src/main/resources/misc/notes
index 4888dc27..7120c729 100644
--- a/datarouter-prov/src/main/resources/misc/notes
+++ b/datarouter-prov/src/main/resources/misc/notes
@@ -3,76 +3,76 @@
diff --git a/datarouter-prov/src/main/resources/misc/provcmd b/datarouter-prov/src/main/resources/misc/provcmd
index b48084f1..75d0bffa 100644
--- a/datarouter-prov/src/main/resources/misc/provcmd
+++ b/datarouter-prov/src/main/resources/misc/provcmd
@@ -26,105 +26,105 @@
diff --git a/datarouter-prov/src/main/resources/misc/runreports b/datarouter-prov/src/main/resources/misc/runreports
index f6037f4b..a5be6d98 100644
--- a/datarouter-prov/src/main/resources/misc/runreports
+++ b/datarouter-prov/src/main/resources/misc/runreports
@@ -37,18 +37,18 @@
 ID=`id -n -u`
 GRP=`id -n -g`
 if [ "$ID" != "datartr" ]
 then
-	echo runreports must be started as user datartr not $ID
-	exit 1
+    echo runreports must be started as user datartr not $ID
+    exit 1
 fi
 if [ "$GRP" != "datartr" ]
 then
-	echo runreports must be started as group datartr not $GRP
-	exit 1
+    echo runreports must be started as group datartr not $GRP
+    exit 1
 fi
 if [ "`pgrep -u mysql mysqld`" = "" ]
 then
-	echo MariaDB is not running. It must be started before runreports
-	exit 1
+    echo MariaDB is not running. It must be started before runreports
+    exit 1
 fi
 # Volume report