author		2018-08-24 00:35:14 -0400
committer	2018-08-24 00:36:47 -0400
commit		437dbc12410e956076bc067b8cc9e67de2b35b37 (patch)
tree		e5cd1eb50b0b4929b0e556202fd715a219b2819d
parent		c49d2f158d852d2f308b77abdbad2328daa2ef9c (diff)
Create Kafka 0.11.0.1 docker image
Issue-ID: DMAAP-628
Change-Id: I3ad0c01d2778870734ccb1ed7daff39afbb18acc
Signed-off-by: sunil unnava <su622b@att.com>
-rw-r--r--	LICENSE.txt							 39
-rw-r--r--	pom.xml								216
-rw-r--r--	src/main/docker/Dockerfile					 25
-rw-r--r--	src/main/docker/broker-list.sh					  5
-rw-r--r--	src/main/docker/create-topics.sh				 36
-rw-r--r--	src/main/docker/docker-compose.yml				 15
-rw-r--r--	src/main/docker/download-kafka.sh				  5
-rw-r--r--	src/main/docker/start-kafka.sh					138
-rw-r--r--	src/main/resources/META-INF/maven/archetype.xml			  9
-rw-r--r--	src/main/resources/archetype-resources/pom.xml			 15
-rw-r--r--	src/main/resources/archetype-resources/src/main/java/App.java	 13
-rw-r--r--	src/main/resources/archetype-resources/src/test/java/AppTest.java	 38
-rw-r--r--	version.properties						 35
13 files changed, 589 insertions, 0 deletions
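The image is produced by the Spotify docker-maven-plugin configured in the new pom.xml, and src/main/docker also carries a docker-compose.yml for local testing. A minimal build-and-run sketch follows; the local Docker/Maven setup and the advertised host address are assumptions, not part of this change:

    # Build onap/dmaap/kafka01101 locally; the "docker" profile flips the
    # skip.docker.build / skip.docker.tag / skip.docker.push properties to false,
    # so the build and tag goals bound to the install phase actually run.
    mvn clean install -Pdocker

    # Start a single-broker test stack (zookeeper + kafka) from the new compose file.
    # KAFKA_ADVERTISED_HOST_NAME in docker-compose.yml must be an address reachable
    # by clients; 192.168.99.100 is just the docker-machine default used there.
    cd src/main/docker
    docker-compose up -d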
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..bb235ff
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,39 @@
+/*
+* ============LICENSE_START==========================================
+* ===================================================================
+* Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+* ===================================================================
+*
+* Unless otherwise specified, all software contained herein is licensed
+* under the Apache License, Version 2.0 (the “License”);
+* you may not use this software except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*             http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*
+*
+* Unless otherwise specified, all documentation contained herein is licensed
+* under the Creative Commons License, Attribution 4.0 Intl. (the “License”);
+* you may not use this documentation except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*             https://creativecommons.org/licenses/by/4.0/
+*
+* Unless required by applicable law or agreed to in writing, documentation
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+* ============LICENSE_END============================================
+*
+* ECOMP is a trademark and service mark of AT&T Intellectual Property.
+*
+*/
diff --git a/pom.xml b/pom.xml
new file mode 100644
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,216 @@
+<!-- ============LICENSE_START=======================================================
+  org.onap.dmaap ================================================================================
+  Copyright © 2017 AT&T Intellectual Property. All rights reserved. ================================================================================
+  Licensed under the Apache License, Version 2.0 (the "License"); you may not
+  use this file except in compliance with the License. You may obtain a copy
+  of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+  by applicable law or agreed to in writing, software distributed under the
+  License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+  OF ANY KIND, either express or implied. See the License for the specific
+  language governing permissions and limitations under the License. ============LICENSE_END=========================================================
+  ECOMP is a trademark and service mark of AT&T Intellectual Property.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.onap.oparent</groupId>
+    <artifactId>oparent</artifactId>
+    <version>1.1.0</version>
+  </parent>
+
+  <groupId>org.onap.dmaap.kafka</groupId>
+  <artifactId>kafka11aaf</artifactId>
+  <version>1.0.0-SNAPSHOT</version>
+  <name>kafka11aaf</name>
+  <licenses>
+    <license>
+      <name>Apache License Version 2.0</name>
+    </license>
+  </licenses>
+
+  <developers>
+    <developer>
+      <name>Sunil Unnava</name>
+      <email></email>
+      <organization>ATT</organization>
+      <organizationUrl>www.att.com</organizationUrl>
+    </developer>
+  </developers>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <version>3.6</version>
+        <dependencies>
+          <dependency>
+            <groupId>org.apache.maven.wagon</groupId>
+            <artifactId>wagon-webdav-jackrabbit</artifactId>
+            <version>2.10</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <version>3.0.2</version>
+      </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+        <version>3.0.0</version>
+        <executions>
+          <execution>
+            <id>attach-sources</id>
+            <goals>
+              <goal>jar-no-fork</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-release-plugin</artifactId>
+        <version>2.5.3</version>
+        <configuration>
+          <autoVersionSubmodules>true</autoVersionSubmodules>
+          <checkModificationExcludes>
+          </checkModificationExcludes>
+        </configuration>
+        <dependencies>
+          <dependency>
+            <groupId>org.apache.maven.scm</groupId>
+            <artifactId>maven-scm-provider-gitexe</artifactId>
+            <version>1.9.4</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+
+      <plugin>
+        <groupId>com.spotify</groupId>
+        <artifactId>docker-maven-plugin</artifactId>
+        <version>1.0.0</version>
+        <configuration>
+          <imageName>onap/dmaap/kafka01101</imageName>
+          <dockerDirectory>src/main/docker</dockerDirectory>
+          <serverId>docker-hub</serverId>
+          <imageTags>
+            <imageTag>${KafkaImg}</imageTag>
+            <imageTag>latest</imageTag>
+          </imageTags>
+          <forceTags>true</forceTags>
+          <resources>
+            <!-- <resource> <targetPath>/</targetPath> <directory>${dockerLocation}</directory>
+              <include>${project.build.finalName}.jar</include> </resource> -->
+            <resource>
+              <targetPath>/</targetPath>
+              <directory>${project.build.directory}</directory>
+              <include>**/**</include>
+            </resource>
+          </resources>
+        </configuration>
+        <executions>
+          <execution>
+            <id>build-image</id>
+            <phase>install</phase>
+            <goals>
+              <goal>build</goal>
+            </goals>
+            <configuration>
+              <skipDockerBuild>${skip.docker.build}</skipDockerBuild>
+            </configuration>
+          </execution>
+
+          <execution>
+            <id>tag-image-project-version</id>
+            <phase>install</phase>
+            <goals>
+              <goal>tag</goal>
+            </goals>
+            <configuration>
+              <image>onap/dmaap/kafka01101</image>
+              <newName>${docker.push.registry}/onap/dmaap/kafka01101:${KafkaImg}</newName>
+              <skipDockerTag>${skip.docker.push}</skipDockerTag>
+            </configuration>
+          </execution>
+
+          <execution>
+            <id>tag-image-latest</id>
+            <phase>install</phase>
+            <goals>
+              <goal>tag</goal>
+            </goals>
+            <configuration>
+              <image>onap/dmaap/kafka01101</image>
+              <newName>${docker.push.registry}/onap/dmaap/kafka01101:latest</newName>
+              <skipDockerTag>${skip.docker.push}</skipDockerTag>
+            </configuration>
+          </execution>
+
+          <execution>
+            <id>push-image</id>
+            <phase>deploy</phase>
+            <goals>
+              <goal>push</goal>
+            </goals>
+            <configuration>
+              <imageName>${docker.push.registry}/onap/dmaap/kafka01101:${KafkaImg}</imageName>
+              <skipDockerPush>${skip.docker.push}</skipDockerPush>
+            </configuration>
+          </execution>
+
+          <execution>
+            <id>push-image-latest</id>
+            <phase>deploy</phase>
+            <goals>
+              <goal>push</goal>
+            </goals>
+            <configuration>
+              <imageName>${docker.push.registry}/onap/dmaap/kafka01101:latest</imageName>
+              <skipDockerPush>${skip.docker.push}</skipDockerPush>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+    </plugins>
+  </build>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <KafkaImg>0.0.1</KafkaImg>
+    <sitePath>/content/sites/site/org/onap/dmaap/kafka0111/${project.artifactId}/${project.version}</sitePath>
+    <skip.docker.build>true</skip.docker.build>
+    <skip.docker.push>true</skip.docker.push>
+    <nexusproxy>https://nexus.onap.org</nexusproxy>
+    <docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
+  </properties>
+
+  <!-- Distribution management -->
+  <distributionManagement>
+    <site>
+      <id>ecomp-site</id>
+      <url>dav:${nexusproxy}${sitePath}</url>
+    </site>
+  </distributionManagement>
+
+  <dependencies>
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>docker</id>
+      <properties>
+        <skip.docker.build>false</skip.docker.build>
+        <skip.docker.tag>false</skip.docker.tag>
+        <skip.docker.push>false</skip.docker.push>
+      </properties>
+    </profile>
+
+  </profiles>
+
+</project>
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
new file mode 100644
index 0000000..39f997c
--- /dev/null
+++ b/src/main/docker/Dockerfile
@@ -0,0 +1,25 @@
+FROM anapsix/alpine-java
+
+ARG kafka_version=0.11.0.1
+ARG scala_version=2.12
+
+
+RUN apk add --update unzip wget curl docker jq coreutils
+
+ENV KAFKA_VERSION=$kafka_version SCALA_VERSION=$scala_version
+ADD download-kafka.sh /tmp/download-kafka.sh
+RUN chmod a+x /tmp/download-kafka.sh && sync && /tmp/download-kafka.sh && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz && ln -s /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION} /opt/kafka
+
+VOLUME ["/kafka"]
+
+ENV KAFKA_HOME /opt/kafka
+ENV PATH ${PATH}:${KAFKA_HOME}/bin
+ADD start-kafka.sh /usr/bin/start-kafka.sh
+ADD broker-list.sh /usr/bin/broker-list.sh
+ADD create-topics.sh /usr/bin/create-topics.sh
+# The scripts need to have executable permission
+RUN chmod a+x /usr/bin/start-kafka.sh && \
+    chmod a+x /usr/bin/broker-list.sh && \
+    chmod a+x /usr/bin/create-topics.sh
+# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown)
+CMD ["start-kafka.sh"]
diff --git a/src/main/docker/broker-list.sh b/src/main/docker/broker-list.sh
new file mode 100644
index 0000000..7f04639
--- /dev/null
+++ b/src/main/docker/broker-list.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}')
+BROKERS=$(for CONTAINER in $CONTAINERS; do docker port $CONTAINER 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done)
+echo $BROKERS | sed -e 's/ /,/g'
diff --git a/src/main/docker/create-topics.sh b/src/main/docker/create-topics.sh
new file mode 100644
index 0000000..34945b3
--- /dev/null
+++ b/src/main/docker/create-topics.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+
+if [[ -z "$START_TIMEOUT" ]]; then
+    START_TIMEOUT=600
+fi
+
+start_timeout_exceeded=false
+count=0
+step=10
+while netstat -lnt | awk '$4 ~ /:'$KAFKA_PORT'$/ {exit 1}'; do
+    echo "waiting for kafka to be ready"
+    sleep $step;
+    count=$(expr $count + $step)
+    if [ $count -gt $START_TIMEOUT ]; then
+        start_timeout_exceeded=true
+        break
+    fi
+done
+
+if $start_timeout_exceeded; then
+    echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)"
+    exit 1
+fi
+
+if [[ -n $KAFKA_CREATE_TOPICS ]]; then
+    IFS=','; for topicToCreate in $KAFKA_CREATE_TOPICS; do
+        echo "creating topics: $topicToCreate"
+        IFS=':' read -a topicConfig <<< "$topicToCreate"
+        if [ ${topicConfig[3]} ]; then
+            JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --config cleanup.policy="${topicConfig[3]}" --if-not-exists
+        else
+            JMX_PORT='' $KAFKA_HOME/bin/kafka-topics.sh --create --zookeeper $KAFKA_ZOOKEEPER_CONNECT --replication-factor ${topicConfig[2]} --partitions ${topicConfig[1]} --topic "${topicConfig[0]}" --if-not-exists
+        fi
+    done
+fi
diff --git a/src/main/docker/docker-compose.yml b/src/main/docker/docker-compose.yml
new file mode 100644
index 0000000..04b82c3
--- /dev/null
+++ b/src/main/docker/docker-compose.yml
@@ -0,0 +1,15 @@
+version: '2'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+  kafka:
+    build: .
+    ports:
+      - "9092"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: 192.168.99.100
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
diff --git a/src/main/docker/download-kafka.sh b/src/main/docker/download-kafka.sh
new file mode 100644
index 0000000..2ddc911
--- /dev/null
+++ b/src/main/docker/download-kafka.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred')
+url="${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+wget -q "${url}" -O "/tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
diff --git a/src/main/docker/start-kafka.sh b/src/main/docker/start-kafka.sh
new file mode 100644
index 0000000..5571a59
--- /dev/null
+++ b/src/main/docker/start-kafka.sh
@@ -0,0 +1,138 @@
+#!/bin/bash
+
+if [[ -z "$KAFKA_PORT" ]]; then
+    export KAFKA_PORT=9092
+fi
+
+create-topics.sh &
+
+if [[ -z "$KAFKA_ADVERTISED_PORT" && \
+  -z "$KAFKA_LISTENERS" && \
+  -z "$KAFKA_ADVERTISED_LISTENERS" && \
+  -S /var/run/docker.sock ]]; then
+    export KAFKA_ADVERTISED_PORT=$(docker port `hostname` $KAFKA_PORT | sed -r "s/.*:(.*)/\1/g")
+fi
+if [[ -z "$KAFKA_BROKER_ID" ]]; then
+    if [[ -n "$BROKER_ID_COMMAND" ]]; then
+        export KAFKA_BROKER_ID=$(eval $BROKER_ID_COMMAND)
+    else
+        # By default auto allocate broker ID
+        export KAFKA_BROKER_ID=-1
+    fi
+fi
+if [[ -z "$KAFKA_LOG_DIRS" ]]; then
+    export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
+fi
+if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
+    export KAFKA_ZOOKEEPER_CONNECT=$(env | grep ZK.*PORT_2181_TCP= | sed -e 's|.*tcp://||' | paste -sd ,)
+fi
+
+if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
+    sed -r -i "s/(export KAFKA_HEAP_OPTS)=\"(.*)\"/\1=\"$KAFKA_HEAP_OPTS\"/g" $KAFKA_HOME/bin/kafka-server-start.sh
+    unset KAFKA_HEAP_OPTS
+fi
+
+if [[ -z "$KAFKA_ADVERTISED_HOST_NAME" && -n "$HOSTNAME_COMMAND" ]]; then
+    export KAFKA_ADVERTISED_HOST_NAME=$(eval $HOSTNAME_COMMAND)
+fi
+
+if [[ -n "$KAFKA_LISTENER_SECURITY_PROTOCOL_MAP" ]]; then
+    if [[ -n "$KAFKA_ADVERTISED_PORT" && -n "$KAFKA_ADVERTISED_PROTOCOL_NAME" ]]; then
+        export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_PROTOCOL_NAME}://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT}"
+        export KAFKA_LISTENERS="$KAFKA_ADVERTISED_PROTOCOL_NAME://:$KAFKA_ADVERTISED_PORT"
+    fi
+
+    if [[ -z "$KAFKA_PROTOCOL_NAME" ]]; then
+        export KAFKA_PROTOCOL_NAME="${KAFKA_ADVERTISED_PROTOCOL_NAME}"
+    fi
+
+    if [[ -n "$KAFKA_PORT" && -n "$KAFKA_PROTOCOL_NAME" ]]; then
+        export ADD_LISTENER="${KAFKA_PROTOCOL_NAME}://${KAFKA_HOST_NAME-}:${KAFKA_PORT}"
+    fi
+
+    if [[ -z "$KAFKA_INTER_BROKER_LISTENER_NAME" ]]; then
+        export KAFKA_INTER_BROKER_LISTENER_NAME=$KAFKA_PROTOCOL_NAME
+    fi
+else
+    #DEFAULT LISTENERS
+    export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://${KAFKA_ADVERTISED_HOST_NAME-}:${KAFKA_ADVERTISED_PORT-$KAFKA_PORT}"
+    export KAFKA_LISTENERS="PLAINTEXT://${KAFKA_HOST_NAME-}:${KAFKA_PORT-9092}"
+fi
+
+if [[ -n "$ADD_LISTENER" && -n "$KAFKA_LISTENERS" ]]; then
+    export KAFKA_LISTENERS="${KAFKA_LISTENERS},${ADD_LISTENER}"
+fi
+
+if [[ -n "$ADD_LISTENER" && -z "$KAFKA_LISTENERS" ]]; then
+    export KAFKA_LISTENERS="${ADD_LISTENER}"
+fi
+
+if [[ -n "$ADD_LISTENER" && -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
+    export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${ADD_LISTENER}"
+fi
+
+if [[ -n "$ADD_LISTENER" && -z "$KAFKA_ADVERTISED_LISTENERS" ]]; then
+    export KAFKA_ADVERTISED_LISTENERS="${ADD_LISTENER}"
+fi
+
+if [[ -n "$KAFKA_INTER_BROKER_LISTENER_NAME" && ! "$KAFKA_INTER_BROKER_LISTENER_NAME"X = "$KAFKA_PROTOCOL_NAME"X ]]; then
+    if [[ -n "$KAFKA_INTER_BROKER_PORT" ]]; then
+        export KAFKA_INTER_BROKER_PORT=$(( $KAFKA_PORT + 1 ))
+    fi
+    export INTER_BROKER_LISTENER="${KAFKA_INTER_BROKER_LISTENER_NAME}://:${KAFKA_INTER_BROKER_PORT}"
+    export KAFKA_LISTENERS="${KAFKA_LISTENERS},${INTER_BROKER_LISTENER}"
+    export KAFKA_ADVERTISED_LISTENERS="${KAFKA_ADVERTISED_LISTENERS},${INTER_BROKER_LISTENER}"
+    unset KAFKA_INTER_BROKER_PORT
+    unset KAFKA_SECURITY_INTER_BROKER_PROTOCOL
+    unset INTER_BROKER_LISTENER
+fi
+
+if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then
+    export KAFKA_BROKER_RACK=$(eval $RACK_COMMAND)
+fi
+
+#Issue newline to config file in case there is not one already
+echo -e "\n" >> $KAFKA_HOME/config/server.properties
+
+unset KAFKA_CREATE_TOPICS
+unset KAFKA_ADVERTISED_PROTOCOL_NAME
+unset KAFKA_PROTOCOL_NAME
+
+if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
+    unset KAFKA_ADVERTISED_PORT
+    unset KAFKA_ADVERTISED_HOST_NAME
+fi
+
+if [[ -n "$KAFKA_LISTENERS" ]]; then
+    unset KAFKA_PORT
+    unset KAFKA_HOST_NAME
+fi
+
+for VAR in `env`
+do
+    if [[ $VAR =~ ^KAFKA_ && ! $VAR =~ ^KAFKA_HOME ]]; then
+        kafka_name=`echo "$VAR" | sed -r "s/KAFKA_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
+        env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
+        if egrep -q "(^|^#)$kafka_name=" $KAFKA_HOME/config/server.properties; then
+            sed -r -i "s@(^|^#)($kafka_name)=(.*)@\2=${!env_var}@g" $KAFKA_HOME/config/server.properties #note that no config values may contain an '@' char
+        else
+            echo "$kafka_name=${!env_var}" >> $KAFKA_HOME/config/server.properties
+        fi
+    fi
+
+    if [[ $VAR =~ ^LOG4J_ ]]; then
+        log4j_name=`echo "$VAR" | sed -r "s/(LOG4J_.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
+        log4j_env=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
+        if egrep -q "(^|^#)$log4j_name=" $KAFKA_HOME/config/log4j.properties; then
+            sed -r -i "s@(^|^#)($log4j_name)=(.*)@\2=${!log4j_env}@g" $KAFKA_HOME/config/log4j.properties #note that no config values may contain an '@' char
+        else
+            echo "$log4j_name=${!log4j_env}" >> $KAFKA_HOME/config/log4j.properties
+        fi
+    fi
+done
+
+if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
+    eval $CUSTOM_INIT_SCRIPT
+fi
+
+exec $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties
diff --git a/src/main/resources/META-INF/maven/archetype.xml b/src/main/resources/META-INF/maven/archetype.xml
new file mode 100644
index 0000000..6f2a4ef
--- /dev/null
+++ b/src/main/resources/META-INF/maven/archetype.xml
@@ -0,0 +1,9 @@
+<archetype>
+  <id>kafka0111</id>
+  <sources>
+    <source>src/main/java/App.java</source>
+  </sources>
+  <testSources>
+    <source>src/test/java/AppTest.java</source>
+  </testSources>
+</archetype>
diff --git a/src/main/resources/archetype-resources/pom.xml b/src/main/resources/archetype-resources/pom.xml
new file mode 100644
index 0000000..96f6e1c
--- /dev/null
+++ b/src/main/resources/archetype-resources/pom.xml
@@ -0,0 +1,15 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>$org.onap.dmaap.kafka</groupId>
+  <artifactId>$kafka0111</artifactId>
+  <version>$0.0.1-SNAPSHOT</version>
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>3.8.1</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/src/main/resources/archetype-resources/src/main/java/App.java b/src/main/resources/archetype-resources/src/main/java/App.java
new file mode 100644
index 0000000..55262cd
--- /dev/null
+++ b/src/main/resources/archetype-resources/src/main/java/App.java
@@ -0,0 +1,13 @@
+package $org.onap.dmaap.kafka.kafka0111;
+
+/**
+ * Hello world!
+ *
+ */
+public class App
+{
+    public static void main( String[] args )
+    {
+        System.out.println( "Hello World!" );
+    }
+}
diff --git a/src/main/resources/archetype-resources/src/test/java/AppTest.java b/src/main/resources/archetype-resources/src/test/java/AppTest.java
new file mode 100644
index 0000000..2a113cf
--- /dev/null
+++ b/src/main/resources/archetype-resources/src/test/java/AppTest.java
@@ -0,0 +1,38 @@
+package $org.onap.dmaap.kafka.kafka0111;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+/**
+ * Unit test for simple App.
+ */
+public class AppTest
+    extends TestCase
+{
+    /**
+     * Create the test case
+     *
+     * @param testName name of the test case
+     */
+    public AppTest( String testName )
+    {
+        super( testName );
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite()
+    {
+        return new TestSuite( AppTest.class );
+    }
+
+    /**
+     * Rigourous Test :-)
+     */
+    public void testApp()
+    {
+        assertTrue( true );
+    }
+}
diff --git a/version.properties b/version.properties
new file mode 100644
index 0000000..65e7bbc
--- /dev/null
+++ b/version.properties
@@ -0,0 +1,35 @@
+###
+# ============LICENSE_START=======================================================
+# ONAP CLAMP
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights
+#                             reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+# ===================================================================
+###
+
+# Versioning variables
+# Note that these variables cannot be structured (e.g. : version.release or version.snapshot etc... )
+# because they are used in Jenkins, whose plug-in doesn't support
+
+major=1
+minor=0
+patch=0
+
+base_version=${major}.${minor}.${patch}
+
+# Release must be completed with git revision # in Jenkins
+release_version=${base_version}
+snapshot_version=${base_version}-SNAPSHOT
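At runtime the container is configured entirely through environment variables: create-topics.sh interprets KAFKA_CREATE_TOPICS as a comma-separated list of topic:partitions:replication-factor entries with an optional fourth cleanup.policy field, and start-kafka.sh writes every other KAFKA_* variable (except KAFKA_HOME) into server.properties after lower-casing it and turning underscores into dots. A hedged usage sketch; the address, topic names, and retention value below are illustrative assumptions, not values from this change:

    # demo-events gets 3 partitions / 1 replica; demo-compacted additionally
    # gets cleanup.policy=compact via the optional fourth field.
    # KAFKA_LOG_RETENTION_HOURS=24 is rewritten to log.retention.hours=24 in
    # server.properties by the env-to-properties loop in start-kafka.sh.
    docker run -d -p 9092:9092 \
      -e KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 \
      -e KAFKA_ADVERTISED_HOST_NAME=10.0.0.5 \
      -e KAFKA_CREATE_TOPICS="demo-events:3:1,demo-compacted:1:1:compact" \
      -e KAFKA_LOG_RETENTION_HOURS=24 \
      onap/dmaap/kafka01101:latest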