Skip to content

Commit e5bbce9

Browse files
committed
Revert "[SPARK-2410][SQL] Merging Hive Thrift/JDBC server"
This reverts commit f6ff2a6.
1 parent 81fcdd2 commit e5bbce9

File tree

54 files changed

+96
-1781
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

54 files changed

+96
-1781
lines changed

.gitignore

-1
Original file line numberDiff line numberDiff line change
@@ -57,4 +57,3 @@ metastore_db/
5757
metastore/
5858
warehouse/
5959
TempStatsStore/
60-
sql/hive-thriftserver/test_warehouses

assembly/pom.xml

-10
Original file line numberDiff line numberDiff line change
@@ -165,16 +165,6 @@
165165
</dependency>
166166
</dependencies>
167167
</profile>
168-
<profile>
169-
<id>hive-thriftserver</id>
170-
<dependencies>
171-
<dependency>
172-
<groupId>org.apache.spark</groupId>
173-
<artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
174-
<version>${project.version}</version>
175-
</dependency>
176-
</dependencies>
177-
</profile>
178168
<profile>
179169
<id>spark-ganglia-lgpl</id>
180170
<dependencies>

bagel/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
<groupId>org.apache.spark</groupId>
2929
<artifactId>spark-bagel_2.10</artifactId>
3030
<properties>
31-
<sbt.project.name>bagel</sbt.project.name>
31+
<sbt.project.name>bagel</sbt.project.name>
3232
</properties>
3333
<packaging>jar</packaging>
3434
<name>Spark Project Bagel</name>

bin/beeline

-45
This file was deleted.

bin/compute-classpath.sh

-1
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,6 @@ if [ -n "$SPARK_PREPEND_CLASSES" ]; then
5252
CLASSPATH="$CLASSPATH:$FWDIR/sql/catalyst/target/scala-$SCALA_VERSION/classes"
5353
CLASSPATH="$CLASSPATH:$FWDIR/sql/core/target/scala-$SCALA_VERSION/classes"
5454
CLASSPATH="$CLASSPATH:$FWDIR/sql/hive/target/scala-$SCALA_VERSION/classes"
55-
CLASSPATH="$CLASSPATH:$FWDIR/sql/hive-thriftserver/target/scala-$SCALA_VERSION/classes"
5655
CLASSPATH="$CLASSPATH:$FWDIR/yarn/stable/target/scala-$SCALA_VERSION/classes"
5756
fi
5857

bin/spark-shell

+2-2
Original file line numberDiff line numberDiff line change
@@ -46,11 +46,11 @@ function main(){
4646
# (see https://github.com/sbt/sbt/issues/562).
4747
stty -icanon min 1 -echo > /dev/null 2>&1
4848
export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
49-
$FWDIR/bin/spark-submit --class org.apache.spark.repl.Main spark-shell "$@"
49+
$FWDIR/bin/spark-submit spark-shell "$@" --class org.apache.spark.repl.Main
5050
stty icanon echo > /dev/null 2>&1
5151
else
5252
export SPARK_SUBMIT_OPTS
53-
$FWDIR/bin/spark-submit --class org.apache.spark.repl.Main spark-shell "$@"
53+
$FWDIR/bin/spark-submit spark-shell "$@" --class org.apache.spark.repl.Main
5454
fi
5555
}
5656

bin/spark-shell.cmd

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,4 @@ rem
1919

2020
set SPARK_HOME=%~dp0..
2121

22-
cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd spark-shell --class org.apache.spark.repl.Main %*
22+
cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd spark-shell %* --class org.apache.spark.repl.Main

bin/spark-sql

-36
This file was deleted.

core/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
<groupId>org.apache.spark</groupId>
2929
<artifactId>spark-core_2.10</artifactId>
3030
<properties>
31-
<sbt.project.name>core</sbt.project.name>
31+
<sbt.project.name>core</sbt.project.name>
3232
</properties>
3333
<packaging>jar</packaging>
3434
<name>Spark Project Core</name>

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

+2-12
Original file line numberDiff line numberDiff line change
@@ -46,10 +46,6 @@ object SparkSubmit {
4646
private val CLUSTER = 2
4747
private val ALL_DEPLOY_MODES = CLIENT | CLUSTER
4848

49-
// A special jar name that indicates the class being run is inside of Spark itself, and therefore
50-
// no user jar is needed.
51-
private val SPARK_INTERNAL = "spark-internal"
52-
5349
// Special primary resource names that represent shells rather than application jars.
5450
private val SPARK_SHELL = "spark-shell"
5551
private val PYSPARK_SHELL = "pyspark-shell"
@@ -261,9 +257,7 @@ object SparkSubmit {
261257
// In yarn-cluster mode, use yarn.Client as a wrapper around the user class
262258
if (clusterManager == YARN && deployMode == CLUSTER) {
263259
childMainClass = "org.apache.spark.deploy.yarn.Client"
264-
if (args.primaryResource != SPARK_INTERNAL) {
265-
childArgs += ("--jar", args.primaryResource)
266-
}
260+
childArgs += ("--jar", args.primaryResource)
267261
childArgs += ("--class", args.mainClass)
268262
if (args.childArgs != null) {
269263
args.childArgs.foreach { arg => childArgs += ("--arg", arg) }
@@ -338,7 +332,7 @@ object SparkSubmit {
338332
* Return whether the given primary resource represents a user jar.
339333
*/
340334
private def isUserJar(primaryResource: String): Boolean = {
341-
!isShell(primaryResource) && !isPython(primaryResource) && !isInternal(primaryResource)
335+
!isShell(primaryResource) && !isPython(primaryResource)
342336
}
343337

344338
/**
@@ -355,10 +349,6 @@ object SparkSubmit {
355349
primaryResource.endsWith(".py") || primaryResource == PYSPARK_SHELL
356350
}
357351

358-
private[spark] def isInternal(primaryResource: String): Boolean = {
359-
primaryResource == SPARK_INTERNAL
360-
}
361-
362352
/**
363353
* Merge a sequence of comma-separated file lists, some of which may be null to indicate
364354
* no files, into a single comma-separated string.

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

+2-3
Original file line numberDiff line numberDiff line change
@@ -204,9 +204,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
204204

205205
/** Fill in values by parsing user options. */
206206
private def parseOpts(opts: Seq[String]): Unit = {
207-
var inSparkOpts = true
208-
209207
// Delineates parsing of Spark options from parsing of user options.
208+
var inSparkOpts = true
210209
parse(opts)
211210

212211
def parse(opts: Seq[String]): Unit = opts match {
@@ -319,7 +318,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
319318
SparkSubmit.printErrorAndExit(errMessage)
320319
case v =>
321320
primaryResource =
322-
if (!SparkSubmit.isShell(v) && !SparkSubmit.isInternal(v)) {
321+
if (!SparkSubmit.isShell(v)) {
323322
Utils.resolveURI(v).toString
324323
} else {
325324
v

dev/create-release/create-release.sh

+5-5
Original file line numberDiff line numberDiff line change
@@ -53,15 +53,15 @@ if [[ ! "$@" =~ --package-only ]]; then
5353
-Dusername=$GIT_USERNAME -Dpassword=$GIT_PASSWORD \
5454
-Dmaven.javadoc.skip=true \
5555
-Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
56-
-Pyarn -Phive -Phive-thriftserver -Phadoop-2.2 -Pspark-ganglia-lgpl\
56+
-Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl\
5757
-Dtag=$GIT_TAG -DautoVersionSubmodules=true \
5858
--batch-mode release:prepare
5959

6060
mvn -DskipTests \
6161
-Darguments="-DskipTests=true -Dmaven.javadoc.skip=true -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 -Dgpg.passphrase=${GPG_PASSPHRASE}" \
6262
-Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
6363
-Dmaven.javadoc.skip=true \
64-
-Pyarn -Phive -Phive-thriftserver -Phadoop-2.2 -Pspark-ganglia-lgpl\
64+
-Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl\
6565
release:perform
6666

6767
cd ..
@@ -111,10 +111,10 @@ make_binary_release() {
111111
spark-$RELEASE_VERSION-bin-$NAME.tgz.sha
112112
}
113113

114-
make_binary_release "hadoop1" "-Phive -Phive-thriftserver -Dhadoop.version=1.0.4"
115-
make_binary_release "cdh4" "-Phive -Phive-thriftserver -Dhadoop.version=2.0.0-mr1-cdh4.2.0"
114+
make_binary_release "hadoop1" "-Phive -Dhadoop.version=1.0.4"
115+
make_binary_release "cdh4" "-Phive -Dhadoop.version=2.0.0-mr1-cdh4.2.0"
116116
make_binary_release "hadoop2" \
117-
"-Phive -Phive-thriftserver -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 -Pyarn.version=2.2.0"
117+
"-Phive -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 -Pyarn.version=2.2.0"
118118

119119
# Copy data
120120
echo "Copying release tarballs"

dev/run-tests

+1-1
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ echo "========================================================================="
6565
# (either resolution or compilation) prompts the user for input either q, r,
6666
# etc to quit or retry. This echo is there to make it not block.
6767
if [ -n "$_RUN_SQL_TESTS" ]; then
68-
echo -e "q\n" | SBT_MAVEN_PROFILES="$SBT_MAVEN_PROFILES -Phive -Phive-thriftserver" sbt/sbt clean package \
68+
echo -e "q\n" | SBT_MAVEN_PROFILES="$SBT_MAVEN_PROFILES -Phive" sbt/sbt clean package \
6969
assembly/assembly test | grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
7070
else
7171
echo -e "q\n" | sbt/sbt clean package assembly/assembly test | \

dev/scalastyle

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
# limitations under the License.
1818
#
1919

20-
echo -e "q\n" | sbt/sbt -Phive -Phive-thriftserver scalastyle > scalastyle.txt
20+
echo -e "q\n" | sbt/sbt -Phive scalastyle > scalastyle.txt
2121
# Check style with YARN alpha built too
2222
echo -e "q\n" | sbt/sbt -Pyarn -Phadoop-0.23 -Dhadoop.version=0.23.9 yarn-alpha/scalastyle \
2323
>> scalastyle.txt

0 commit comments

Comments
 (0)