From e4b806bebfdc47f35129a184de098f83f8616c98 Mon Sep 17 00:00:00 2001
From: PradeepSurale
Date: Mon, 18 Feb 2019 16:27:33 +0530
Subject: [PATCH] Code refactoring improvements.
Restored SmartConnectorExample.scala to its original example content.
---
build.gradle | 2 +
.../impl/SmartConnectorRDDHelper.scala | 4 +-
...State.scala => SnappySessionSQLConf.scala} | 0
.../snappydata/SmartConnectorExample.scala | 42 ++-----------------
settings.gradle | 2 +
5 files changed, 9 insertions(+), 41 deletions(-)
rename encoders/src/main/scala/org/apache/spark/sql/internal/{SessionState.scala => SnappySessionSQLConf.scala} (100%)
diff --git a/build.gradle b/build.gradle
index b6cae25d70..144a0709d1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -818,6 +818,8 @@ task product(type: Zip) {
dependsOn ":snappy-spark:snappy-spark-assembly_${scalaBinaryVersion}:sparkProduct"
dependsOn ':snappy-launcher:jar'
dependsOn ':jdbcJar'
+ // The "v2connector" module is commented out to exclude it from the build and test process.
+ // Uncomment the line below if we decide to include it again.
// dependsOn ":snappy-v2connector_${scalaBinaryVersion}:jar"
def clusterProject = project(":snappy-cluster_${scalaBinaryVersion}")
diff --git a/core/src/main/scala/io/snappydata/impl/SmartConnectorRDDHelper.scala b/core/src/main/scala/io/snappydata/impl/SmartConnectorRDDHelper.scala
index bb4165bfb6..0b00d052ea 100644
--- a/core/src/main/scala/io/snappydata/impl/SmartConnectorRDDHelper.scala
+++ b/core/src/main/scala/io/snappydata/impl/SmartConnectorRDDHelper.scala
@@ -37,9 +37,7 @@ final class SmartConnectorRDDHelper {
def prepareScan(conn: Connection, txId: String, columnTable: String, projection: Array[Int],
serializedFilters: Array[Byte], partition: SmartExecutorBucketPartition,
catalogVersion: Long): (PreparedStatement, ResultSet) = {
- // Additional (4th) Parameter added in order to support for v2 Connector
- // which decides weather to use kryo serializer or Java serializer. Here
- // we are using kryo serializer
+
val pstmt = conn.prepareStatement("call sys.COLUMN_TABLE_SCAN(?, ?, ?, 1)")
pstmt.setString(1, columnTable)
pstmt.setString(2, projection.mkString(","))
diff --git a/encoders/src/main/scala/org/apache/spark/sql/internal/SessionState.scala b/encoders/src/main/scala/org/apache/spark/sql/internal/SnappySessionSQLConf.scala
similarity index 100%
rename from encoders/src/main/scala/org/apache/spark/sql/internal/SessionState.scala
rename to encoders/src/main/scala/org/apache/spark/sql/internal/SnappySessionSQLConf.scala
diff --git a/examples/src/main/scala/org/apache/spark/examples/snappydata/SmartConnectorExample.scala b/examples/src/main/scala/org/apache/spark/examples/snappydata/SmartConnectorExample.scala
index 485a47af8a..45df2c783c 100644
--- a/examples/src/main/scala/org/apache/spark/examples/snappydata/SmartConnectorExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/snappydata/SmartConnectorExample.scala
@@ -16,9 +16,7 @@
*/
package org.apache.spark.examples.snappydata
-import java.util.{Properties, Random}
-
-import org.apache.spark.sql.{SaveMode, SnappySession, SparkSession}
+import org.apache.spark.sql.{SnappySession, SparkSession}
/**
* This example shows how an application can interact with SnappyStore in Split cluster mode.
@@ -72,40 +70,10 @@ object SmartConnectorExample {
builder.config(params(0), params(1))
})
- builder.config("spark.snappydata.connection", "localhost:1527" )
-
- val spark: SparkSession = builder.getOrCreate
-
- val random = Math.abs(new Random().nextInt())
-
- // scalastyle:off
- println("Random Name "+random)
-
- val prop = Map("user"->"debian-sys-maint", "password"->"8hQgx9JeFX7lfqAk",
- "url"-> "jdbc:mysql://localhost:3306/inventory", "dbtable" -> "STAGING_AIRLINEREF")
-
- val properties = new Properties()
- properties.put("user", "debian-sys-maint")
- properties.put("password", "8hQgx9JeFX7lfqAk")
- properties.put( "url", "jdbc:mysql://localhost:3306/inventory")
- properties.put("dbtable", "STAGING_AIRLINEREF")
-
+ val spark: SparkSession = builder
+ .getOrCreate
val snSession = new SnappySession(spark.sparkContext)
- // val dataset = snSession.sql("select * from AIRLINEREF" )
-
- val dataset = snSession.read.parquet("/home/pradeep/trunk/snappydata/examples/quickstart/data/airportcodeParquetData")
-
- // val dataset = spark.read.parquet("/home/pradeep/" +
-// "trunk/snappydata/examples/quickstart/data/airlineParquetData")
-
- dataset.write.format("jdbc").options(prop).saveAsTable("STAGING_AIRLINEREF" )
-
- // dataset.write.jdbc("jdbc:mysql://localhost:3306/inventory", "AIRLINEREF", properties)
-
- /* val snSession = new SnappySession(spark.sparkContext)
-
- // scalastyle:off
println("\n\n #### Reading from the SnappyStore table SNAPPY_COL_TABLE #### \n")
val colTable = snSession.table("SNAPPY_COL_TABLE")
colTable.show(10)
@@ -120,9 +88,7 @@ object SmartConnectorExample {
snSession.sql("create table TestColumnTable (id bigint not null, k bigint not null) using column")
- dataFrame.write.insertInto("TestColumnTable") */
-
- // scalastyle:off
+ dataFrame.write.insertInto("TestColumnTable")
println(" #### Write to table completed. ### \n\n" +
"Now you can query table TestColumnTable using $SNAPPY_HOME/bin/snappy-shell")
diff --git a/settings.gradle b/settings.gradle
index 181793f2d2..3af9d0bc43 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -27,6 +27,8 @@ include 'dunit'
include ':snappy-dtests_' + scalaBinaryVersion
include ':snappy-compatibility-tests_' + scalaBinaryVersion
include ':snappy-encoders_' + scalaBinaryVersion
+// The "v2connector" module is commented out to exclude it from the build and test process.
+// Uncomment the line below if we decide to include it again.
// include ':snappy-v2connector_' + scalaBinaryVersion
project(':snappy-jdbc_' + scalaBinaryVersion).projectDir = "$rootDir/jdbc" as File