Merge branch 'master' into SNAP-3104
# Conflicts:
#	cluster/src/main/scala/io/snappydata/gemxd/ClusterCallbacksImpl.scala
#	core/src/main/scala/io/snappydata/util/ServiceUtils.scala
Vatsal Mevada committed Feb 11, 2020
2 parents dd1c9db + c312afe commit e3be0e3
Showing 204 changed files with 94,095 additions and 76,548 deletions.
155,393 changes: 80,536 additions & 74,857 deletions LICENSE

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions README.md
@@ -102,21 +102,21 @@ SnappyData artifacts are hosted in Maven Central. You can add a Maven dependency
```
groupId: io.snappydata
artifactId: snappydata-cluster_2.11
-version: 1.1.1
+version: 1.2.0
```

### Using SBT Dependency

If you are using SBT, add this line to your **build.sbt** for core SnappyData artifacts:

```
-libraryDependencies += "io.snappydata" % "snappydata-core_2.11" % "1.1.1"
+libraryDependencies += "io.snappydata" % "snappydata-core_2.11" % "1.2.0"
```

For additions related to SnappyData cluster, use:

```
-libraryDependencies += "io.snappydata" % "snappydata-cluster_2.11" % "1.1.1"
+libraryDependencies += "io.snappydata" % "snappydata-cluster_2.11" % "1.2.0"
```

You can find more specific SnappyData artifacts [here](http://mvnrepository.com/artifact/io.snappydata)
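Before bumping your own build to the new version, it can help to confirm the artifact is actually visible on Maven Central. A minimal check, assuming the standard repository layout (the exact path is an assumption, not part of this repo):

```
# Probe Maven Central for the 1.2.0 POM; an HTTP 200 response means it is published.
curl -sI https://repo1.maven.org/maven2/io/snappydata/snappydata-core_2.11/1.2.0/snappydata-core_2.11-1.2.0.pom | head -n 1
```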
72 changes: 30 additions & 42 deletions build.gradle
@@ -72,7 +72,7 @@ allprojects {
apply plugin: "build-time-tracker"

group = 'io.snappydata'
-version = '1.1.1'
+version = '1.2.0'

// apply compiler options
tasks.withType(JavaCompile) {
@@ -108,7 +108,7 @@ allprojects {
scalaBinaryVersion = '2.11'
scalaVersion = scalaBinaryVersion + '.8'
sparkVersion = '2.1.1'
-snappySparkVersion = '2.1.1.7'
+snappySparkVersion = '2.1.1.8'
sparkDistName = "spark-${sparkVersion}-bin-hadoop2.7"
sparkCurrentVersion = '2.3.2'
sparkCurrentDistName = "spark-${sparkCurrentVersion}-bin-hadoop2.7"
@@ -117,6 +117,10 @@
junitVersion = '4.12'
mockitoVersion = '1.10.19'
hadoopVersion = '2.7.7'
+gcsHadoop2ConnectorVersion = 'hadoop2-2.0.0'
+gcsHadoop3ConnectorVersion = 'hadoop3-2.0.0'
+sparkAvroVersion = '4.0.0'
+sparkXmlVersion = '0.4.1'
scalatestVersion = '2.2.6'
jettyVersion = '9.2.26.v20180806'
guavaVersion = '14.0.1'
@@ -151,7 +155,7 @@ allprojects {
antlr2Version = '2.7.7'

pegdownVersion = '1.6.0'
-snappyStoreVersion = '1.6.4'
+snappyStoreVersion = '1.6.5'
snappydataVersion = version
pulseVersion = '1.5.1'
zeppelinInterpreterVersion = '0.7.3.6'
@@ -163,12 +167,20 @@
osFamilyName = osName.getFamilyName().replace(' ', '').toLowerCase()
osVersion = System.getProperty('os.version')
buildDate = new Date().format('yyyy-MM-dd HH:mm:ss Z')
+buildDateShort = ''
+if (rootProject.hasProperty('withDate')) {
+  buildDateShort = "${new Date().format('yyyyMMdd')}_"
+}
+devEdition = ''
+if (rootProject.hasProperty('dev')) {
+  devEdition = "-dev"
+}
buildNumber = new Date().format('MMddyy')
jdkVersion = System.getProperty('java.version')
-sparkJobServerVersion = '0.6.2.10'
+sparkJobServerVersion = '0.6.2.11'
eclipseCollectionsVersion = '9.2.0'
fastutilVersion = '8.2.2'
-sparkMetricsLibVersion = '2.0.0'
+snappySparkMetricsLibVersion = '2.0.0.1'

gitCmd = "git --git-dir=${rootDir}/.git --work-tree=${rootDir}"
gitBranch = "${gitCmd} rev-parse --abbrev-ref HEAD".execute().text.trim()
@@ -568,7 +580,7 @@ subprojects {

// apply default manifest
if (rootProject.hasProperty('enablePublish')) {
-createdBy = 'TIBCO Software Inc.'
+createdBy = vendorName
}
jar {
manifest {
@@ -914,6 +926,7 @@ task product(type: Zip) {
dependsOn ":snappy-spark:snappy-spark-assembly_${scalaBinaryVersion}:sparkProduct"
dependsOn ':snappy-launcher:jar'
dependsOn ':jdbcJar'
+dependsOn ':copyShadowJars'

def clusterProject = project(":snappy-cluster_${scalaBinaryVersion}")
def launcherProject = project(':snappy-launcher')
@@ -1074,6 +1087,11 @@ task product(type: Zip) {
into "${snappyProductDir}/connectors"
include "*.jar"
}
+copy {
+  from "${jdbcConnectorProject.projectDir}/../tdv-connector/lib"
+  into "${snappyProductDir}/connectors"
+  include "csjdbc8.jar"
+}
}
if (hasGemFireConnectorProject) {
copy {
@@ -1200,7 +1218,6 @@ ospackage {
}

buildRpm {
-dependsOn ':packageVSD'
if (rootProject.hasProperty('enablePublish')) {
dependsOn ':packageZeppelinInterpreter'
}
@@ -1216,7 +1233,6 @@ buildRpm {
}

buildDeb {
-dependsOn ':packageVSD'
if (rootProject.hasProperty('enablePublish')) {
dependsOn ':packageZeppelinInterpreter'
}
@@ -1235,13 +1251,11 @@ buildDeb {
distTar {
// archiveName = 'TIB_compute-ce_' + version + '_' + osFamilyName + '.tar.gz'
if (isEnterpriseProduct) {
-archiveName = 'TIB_compute_' + version + '_' + osFamilyName + '.tar.gz'
+archiveName = 'TIB_compute' + devEdition + '_' + version + '_' + buildDateShort + osFamilyName + '.tar.gz'
} else {
classifier 'bin'
}
dependsOn product
-// also package VSD
-dependsOn ':packageVSD'
if (rootProject.hasProperty('enablePublish')) {
dependsOn ':packageZeppelinInterpreter'
}
@@ -1255,13 +1269,11 @@ distTar {
distZip {
// archiveName = 'TIB_compute-ce_' + version + '_' + osFamilyName + '.zip'
if (isEnterpriseProduct) {
-archiveName = 'TIB_compute_' + version + '_' + osFamilyName + '.zip'
+archiveName = 'TIB_compute' + devEdition + '_' + version + '_' + buildDateShort + osFamilyName + '.zip'
} else {
classifier 'bin'
}
dependsOn product
-// also package VSD
-dependsOn ':packageVSD'
if (rootProject.hasProperty('enablePublish')) {
dependsOn ':packageZeppelinInterpreter'
}
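The effect of the new `devEdition` and `buildDateShort` properties on the archive names is easiest to see from invocations. A sketch, assuming an enterprise build on Linux (so `osFamilyName` resolves to `linux`) run on the commit date:

```
./gradlew distTar                    # TIB_compute_1.2.0_linux.tar.gz
./gradlew distTar -PwithDate         # TIB_compute_1.2.0_20200211_linux.tar.gz
./gradlew distTar -Pdev -PwithDate   # TIB_compute-dev_1.2.0_20200211_linux.tar.gz
```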
Expand Down Expand Up @@ -1332,7 +1344,6 @@ task distInstallers {
task distProduct {
dependsOn product, distTar, distZip
dependsOn distInstallers
-dependsOn copyShadowJars
}

task generateSources {
@@ -1372,6 +1383,7 @@ task buildAll {
dependsOn getTasksByName('assemble', true).collect { it.path }
dependsOn getTasksByName('product', true).collect { it.path }
dependsOn getTasksByName('testClasses', true).collect { it.path }
+dependsOn copyShadowJars
mustRunAfter cleanAll
}
task buildDtests {
@@ -1386,7 +1398,9 @@ task checkAll {
if (rootProject.hasProperty('spark')) {
dependsOn ':snappy-spark:check'
}
dependsOn ":snappy-cluster_${scalaBinaryVersion}:check"
if (!rootProject.hasProperty('cluster.skip')) {
dependsOn ":snappy-cluster_${scalaBinaryVersion}:check"
}
dependsOn ":snappy-examples_${scalaBinaryVersion}:check"
if (!rootProject.hasProperty('aqp.skip') && hasAqpProject && isEnterpriseProduct) {
dependsOn ":snappy-aqp_${scalaBinaryVersion}:check"
@@ -1489,37 +1503,11 @@ task packagePulse { doLast {
}
} }

-task packageVSD { doLast {
-  String thirdparty = System.env.THIRDPARTYDIR
-  String vsdDir = ''
-
-  if (thirdparty == null || thirdparty.length() == 0) {
-    vsdDir = "${projectDir}/../thirdparty/vsd"
-  } else {
-    vsdDir = "${thirdparty}/vsd"
-  }
-
-  String vsdDistDir = "${vsdDir}/70/vsd"
-  if (file(vsdDistDir).canWrite()) {
-    println ''
-    println "Copying VSD from ${vsdDistDir} to ${snappyProductDir}/vsd"
-    println ''
-    delete "${snappyProductDir}/vsd"
-    copy {
-      from vsdDistDir
-      into "${snappyProductDir}/vsd"
-    }
-  } else {
-    println "Skipping VSD due to unwritable ${vsdDistDir}"
-  }
-} }
-
task sparkPackage {
dependsOn ":snappy-core_${scalaBinaryVersion}:sparkPackage"
}

packagePulse.mustRunAfter product
-packageVSD.mustRunAfter product
packageZeppelinInterpreter.mustRunAfter product

distTar.mustRunAfter clean, cleanAll, product
23 changes: 16 additions & 7 deletions cluster/bin/snappy
@@ -76,9 +76,10 @@ elif [ -z "$SNAPPY_NO_QUICK_LAUNCH" -a $# -ge 2 \
. "${SPARK_HOME}"/bin/load-snappy-env.sh

HOSTNAME_FOR_CLIENTS=
+IMPLICIT_AWS_CLIENT_BIND_ADDRESS=
if [ "$2" = "start" ]; then
# set hostname-for-clients and SPARK_PUBLIC_DNS in AWS (only supported for Linux)
if [ -z "$SPARK_PUBLIC_DNS" ]; then
if [ -z "$SPARK_PUBLIC_DNS" -o -n "$IMPLICIT_CLIENT_BIND_ADDRESS" ]; then
CHECK_AWS=1
if [ -r /sys/hypervisor/uuid ]; then
if ! grep -q '^ec2' /sys/hypervisor/uuid; then
@@ -93,19 +94,27 @@ elif [ -z "$SNAPPY_NO_QUICK_LAUNCH" -a $# -ge 2 \
CHECK_AWS=
fi
if [ -n "$CHECK_AWS" ]; then
SPARK_PUBLIC_DNS="$(curl -s --connect-timeout 3 http://169.254.169.254/latest/meta-data/public-ipv4 | head -1)"
if [ -n "$SPARK_PUBLIC_DNS" ]; then
if ! echo $"${@// /\\ }" | grep -q 'hostname-for-clients='; then
HOSTNAME_FOR_CLIENTS="-hostname-for-clients=$SPARK_PUBLIC_DNS"
if [ -z "$SPARK_PUBLIC_DNS" ]; then
SPARK_PUBLIC_DNS="$(curl -s --connect-timeout 3 http://169.254.169.254/latest/meta-data/public-ipv4 | head -1)"
if [ -n "$SPARK_PUBLIC_DNS" ]; then
if ! echo $"${@// /\\ }" | grep -q 'hostname-for-clients='; then
HOSTNAME_FOR_CLIENTS="-hostname-for-clients=$SPARK_PUBLIC_DNS"
fi
export SPARK_PUBLIC_DNS
fi
export SPARK_PUBLIC_DNS
fi
if [ -n "$IMPLICIT_CLIENT_BIND_ADDRESS" ]; then
IMPLICIT_AWS_CLIENT_BIND_ADDRESS="-client-bind-address=0.0.0.0"
fi
elif [ -n "$IMPLICIT_CLIENT_BIND_ADDRESS" ]; then
IMPLICIT_AWS_CLIENT_BIND_ADDRESS="-client-bind-address=${IMPLICIT_CLIENT_BIND_ADDRESS}"
fi
fi
fi

JARS="`echo "${SPARK_HOME}"/jars/snappydata-launcher* "${SPARK_HOME}"/jars/gemfire-shared* "${SPARK_HOME}"/jars/jna-4.* | sed 's/ /:/g'`"
-exec $RUNNER $JAVA_ARGS -Xverify:none -cp "$JARS" io.snappydata.tools.QuickLauncher "$@" $HOSTNAME_FOR_CLIENTS
+exec $RUNNER $JAVA_ARGS -Xverify:none -cp "$JARS" io.snappydata.tools.QuickLauncher "$@" $HOSTNAME_FOR_CLIENTS $IMPLICIT_AWS_CLIENT_BIND_ADDRESS
+IMPLICIT_CLIENT_BIND_ADDRESS=
else
# use full snappy launcher
exec "$SPARK_HOME"/bin/spark-class $JAVA_ARGS io.snappydata.tools.SnappyUtilLauncher "$@"
26 changes: 26 additions & 0 deletions cluster/bin/snappy-scala
@@ -0,0 +1,26 @@
#!/usr/bin/env bash

#
# Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License. See accompanying
# LICENSE file.
#

function absPath() {
perl -MCwd -le 'print Cwd::abs_path(shift)' "$1"
}
bin="$(dirname "$(absPath "$0")")"

export SNAPPY_SCRIPT_NAME=snappy-scala
exec "$bin/snappy" "$@"
33 changes: 33 additions & 0 deletions cluster/build.gradle
@@ -133,6 +133,39 @@ dependencies {
exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
}

compile "io.snappydata:spark-metrics:${snappySparkMetricsLibVersion}"

compile (group: 'com.databricks', name:'spark-xml_2.11', version: sparkXmlVersion) {
//exclude(group: 'org.scala-lang', module: 'scala-library')
}
compile (group: 'com.databricks', name:'spark-avro_2.11', version: sparkAvroVersion) {
//exclude(group: 'org.scala-lang', module: 'scala-library')
//exclude(group: 'org.apache.avro', module: 'avro')
}
compile(group:'com.google.cloud.bigdataoss', name:'gcs-connector', version: gcsHadoop2ConnectorVersion, classifier: 'shaded')

compile(group: 'org.apache.hadoop', name: 'hadoop-azure', version: hadoopVersion) {
// exclude some dependencies like in hadoop-aws to avoid conflicts with other deps
exclude(group: 'asm', module: 'asm')
exclude(group: 'org.codehaus.jackson', module: 'jackson-mapper-asl')
exclude(group: 'org.ow2.asm', module: 'asm')
exclude(group: 'org.apache.zookeeper', module: 'zookeeper')
exclude(group: 'org.jboss.netty', module: 'netty')
exclude(group: 'jline', module: 'jline')
exclude(group: 'commons-logging', module: 'commons-logging')
exclude(group: 'org.mockito', module: 'mockito-all')
exclude(group: 'org.mortbay.jetty', module: 'servlet-api-2.5')
exclude(group: 'javax.servlet', module: 'servlet-api')
exclude(group: 'junit', module: 'junit')
exclude(group: 'com.google.guava', module: 'guava')
exclude(group: 'com.sun.jersey')
exclude(group: 'com.sun.jersey.jersey-test-framework')
exclude(group: 'com.sun.jersey.contribs')
exclude(group: 'com.google.protobuf', module: 'protobuf-java')
exclude(group: 'com.jcraft', module: 'jsch')
exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
}

testCompile project(':dunit')
testCompile "it.unimi.dsi:fastutil:${fastutilVersion}"
testCompile "org.scalatest:scalatest_${scalaBinaryVersion}:${scalatestVersion}"
2 changes: 1 addition & 1 deletion cluster/conf/leads.template
@@ -37,7 +37,7 @@
#
# IV) Start the SnappyData Zeppelin interpreter on the Lead node
#
-# lead1 -dir=/tmp/data/server -spark.ui.port=3333 -spark.executor.cores=16 -zeppelin.interpreter.enable=true -classpath=<Path to jar of Zeppelin Interpreter for SnappyData>
+# lead1 -dir=/tmp/data/server -spark.ui.port=3333 -spark.executor.cores=16 -zeppelin.interpreter.enable=true
#
# For more options, see http://snappydatainc.github.io/snappydata/configuration/#configuration
localhost
3 changes: 3 additions & 0 deletions cluster/conf/locators.template
@@ -52,6 +52,9 @@
#
# <private hostname/IP> -client-bind-address=<private hostname/IP> -hostname-for-clients=<public hostname/IP for clients>
#
+# NOTE: In an AWS environment, the cluster launch script (snappy-start-all.sh) automatically appends the above two
+# properties, with appropriate values, to the locator's configuration, so users do not have to specify them here explicitly.
+#
# III) Logging to different directory
# Specify the startup directory where the logs and configuration files for that locator instance
# are managed.
4 changes: 4 additions & 0 deletions cluster/conf/servers.template
@@ -39,6 +39,10 @@
#
# <private hostname/IP> -client-bind-address=<private hostname/IP> -client-port=1555 -hostname-for-clients=<public hostname/IP for clients>
#
+# NOTE: In an AWS environment, the cluster launch script (snappy-start-all.sh) automatically appends the above two
+# properties, viz. -client-bind-address and -hostname-for-clients, with appropriate values to the server's
+# configuration, so users do not have to specify them here explicitly.
+#
# For more configuration options,
# see http://snappydatainc.github.io/snappydata/configuration/#configuration
localhost
10 changes: 7 additions & 3 deletions cluster/sbin/snappy-nodes.sh
@@ -201,8 +201,8 @@ function execute() {
# set the default client-bind-address and locator's peer-discovery-address
if [ -z "${clientBindAddress}" -a "${componentType}" != "lead" ]; then
args="${args} -client-bind-address=${host}"
clientBindAddress="${host}"
-preCommand="${preCommand}export IMPLICIT_CLIENT_BIND_ADDRESS=$host; "
+export IMPLICIT_CLIENT_BIND_ADDRESS="${host}"
fi
if [ -z "$(echo $args $"${@// /\\ }" | grep 'peer-discovery-address=')" -a "${componentType}" = "locator" ]; then
args="${args} -peer-discovery-address=${host}"
@@ -243,7 +243,11 @@ function execute() {
-*) postArgs="$postArgs $arg"
esac
done
if [ "$host" != "localhost" ]; then
THIS_HOST_IP=
if [ "$(echo `uname -s`)" == "Linux" ]; then
THIS_HOST_IP="$(echo `hostname -I` | grep "$host")"
fi
if [ "$host" != "localhost" -a -z "$THIS_HOST_IP" ]; then
if [ "$dirfolder" != "" ]; then
# Create the directory for the snappy component if the folder is a default folder
(ssh $SPARK_SSH_OPTS "$host" \
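The new `hostname -I` check decides whether the target host is this machine, in which case the launcher runs locally instead of over ssh. A standalone sketch (the host value is illustrative):

```
host=10.0.0.5
THIS_HOST_IP=
if [ "$(uname -s)" = "Linux" ]; then
  # hostname -I lists this machine's addresses; a match means "$host" is local
  THIS_HOST_IP="$(echo `hostname -I` | grep "$host")"
fi
if [ "$host" != "localhost" -a -z "$THIS_HOST_IP" ]; then
  echo "remote host: launch over ssh"
else
  echo "local host: launch in-process"
fi
```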
(Diffs for the remaining changed files are not rendered.)
