Last active
October 26, 2015 17:01
-
-
Save deenar/025ed871cc0ddb7c4587 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
##
# Generated by Cloudera Manager and should not be modified directly
##
# Spark environment setup for a standalone Spark 1.5.1 build running against a
# CDH 5.4.0 parcel: resolves the config dir, points Spark at the CDH Hadoop
# install, and assembles SPARK_DIST_CLASSPATH from the CDH component dirs.

# Default SPARK_CONF_DIR to the directory containing this script.
# dirname/BASH_SOURCE are quoted so paths containing spaces survive
# word splitting inside the command substitution (ShellCheck SC2086).
if [ -z "$SPARK_CONF_DIR" ]; then
  export SPARK_CONF_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
fi

export SPARK_HOME=/opt/tools/spark-1.5.1-bin-hadoop2.6/
export DEFAULT_HADOOP_HOME=/opt/cloudera/parcels/CDH-5.4.0-1.cdh5.4.0.p0.27/lib/hadoop

### Path of Spark assembly jar in HDFS
export SPARK_JAR_HDFS_PATH=${SPARK_JAR_HDFS_PATH:-''}

### Extra libraries needed by some Spark subsystems.
# Each honors a pre-set *_HOME override and falls back to the CDH parcel path.
CDH_HIVE_HOME=${HIVE_HOME:-'/opt/cloudera/parcels/CDH-5.4.0-1.cdh5.4.0.p0.27/lib/hive'}
CDH_FLUME_HOME=${FLUME_HOME:-'/opt/cloudera/parcels/CDH-5.4.0-1.cdh5.4.0.p0.27/lib/flume-ng'}
CDH_PARQUET_HOME=${PARQUET_HOME:-'/opt/cloudera/parcels/CDH-5.4.0-1.cdh5.4.0.p0.27/lib/hadoop/../parquet'}
CDH_AVRO_HOME=${AVRO_HOME:-'/opt/cloudera/parcels/CDH-5.4.0-1.cdh5.4.0.p0.27/lib/hadoop/../avro'}
HADOOP_EXTRA_CLASSPATH=${HADOOP_CLASSPATH:-''}

export HADOOP_HOME=${HADOOP_HOME:-$DEFAULT_HADOOP_HOME}
if [ -n "$HADOOP_HOME" ]; then
  LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${HADOOP_HOME}/lib/native
fi

# NOTE(review): SPARK_EXTRA_LIB_PATH is hard-coded empty, so the branch below
# never fires. Kept as-is because Cloudera Manager regenerates this value when
# extra library paths are configured in the CM UI.
SPARK_EXTRA_LIB_PATH=""
if [ -n "$SPARK_EXTRA_LIB_PATH" ]; then
  LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$SPARK_EXTRA_LIB_PATH
fi
export LD_LIBRARY_PATH

export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$SPARK_CONF_DIR/yarn-conf}

# This is needed to support old CDH versions that use a forked version
# of compute-classpath.sh.
export SCALA_LIBRARY_PATH=${SPARK_HOME}/lib

# Set distribution classpath. This is only used in CDH 5.3 and later.
SPARK_DIST_CLASSPATH="$HADOOP_HOME/client/*"
# Quote the hadoop binary path and --config argument so a HADOOP_HOME or
# conf dir containing spaces is passed intact (ShellCheck SC2086).
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$("$HADOOP_HOME/bin/hadoop" --config "$HADOOP_CONF_DIR" classpath)"
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$CDH_HIVE_HOME/lib/*"
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$CDH_FLUME_HOME/lib/*"
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$CDH_PARQUET_HOME/lib/*"
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$CDH_AVRO_HOME/*"
if [ -n "$HADOOP_EXTRA_CLASSPATH" ]; then
  SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$HADOOP_EXTRA_CLASSPATH"
fi
export SPARK_DIST_CLASSPATH
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// BUG FIX: "show databases" is a Hive command. The plain SQLContext uses
// Spark's built-in SQL parser, which rejects it — exactly the failure the
// stack trace below records (``with'' expected but identifier show found
// at AbstractSparkSQLParser.parse). In Spark 1.5, HiveContext routes such
// statements through the Hive parser and reads the metastore, so construct
// a HiveContext instead. HiveContext extends SQLContext, so every existing
// use of `sqlContext` (sql, read, createDataFrame, ...) keeps working.
val sqlContext = new org.apache.spark.sql.hive.HiveContext(sparkContext)
val results = sqlContext.sql("show databases")
java.lang.RuntimeException: [1.1] failure: ``with'' expected but identifier show found | |
show databases | |
^ | |
at scala.sys.package$.error(package.scala:27) | |
at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.parse(AbstractSparkSQLParser.scala:36) | |
at org.apache.spark.sql.catalyst.DefaultParserDialect.parse(ParserDialect.scala:67) | |
at org.apache.spark.sql.SQLContext$$anonfun$3.apply(SQLContext.scala:169) | |
at org.apache.spark.sql.SQLContext$$anonfun$3.apply(SQLContext.scala:169) | |
at org.apache.spark.sql.SparkSQLParser$$anonfun$org$apache$spark$sql$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:115) | |
at org.apache.spark.sql.SparkSQLParser$$anonfun$org$apache$spark$sql$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:114) | |
at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136) | |
at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135) | |
at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242) | |
at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242) | |
at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222) | |
at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254) | |
at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254) | |
at scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202) | |
at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254) | |
at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254) | |
at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222) | |
at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891) | |
at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891) | |
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57) | |
at scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890) | |
at scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110) | |
at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.parse(AbstractSparkSQLParser.scala:34) | |
at org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:166) | |
at org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:166) | |
at org.apache.spark.sql.execution.datasources.DDLParser.parse(DDLParser.scala:42) | |
at org.apache.spark.sql.SQLContext.parseSql(SQLContext.scala:189) | |
at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:719) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:57) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:62) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:64) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:66) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:68) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:70) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:72) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:74) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:76) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:78) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:80) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:82) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:84) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:86) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:88) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:90) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:92) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:94) | |
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:96) | |
at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:98) | |
at $iwC$$iwC$$iwC$$iwC.<init>(<console>:100) | |
at $iwC$$iwC$$iwC.<init>(<console>:102) | |
at $iwC$$iwC.<init>(<console>:104) | |
at $iwC.<init>(<console>:106) | |
at <init>(<console>:108) | |
at .<init>(<console>:112) | |
at .<clinit>(<console>) | |
at .<init>(<console>:7) | |
at .<clinit>(<console>) | |
at $print(<console>) | |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:497) | |
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065) | |
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340) | |
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840) | |
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871) | |
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819) | |
at notebook.kernel.Repl$$anonfun$3.apply(Repl.scala:173) | |
at notebook.kernel.Repl$$anonfun$3.apply(Repl.scala:173) | |
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57) | |
at scala.Console$.withOut(Console.scala:126) | |
at notebook.kernel.Repl.evaluate(Repl.scala:172) | |
at notebook.client.ReplCalculator$$anonfun$10$$anon$1$$anonfun$22.apply(ReplCalculator.scala:348) | |
at notebook.client.ReplCalculator$$anonfun$10$$anon$1$$anonfun$22.apply(ReplCalculator.scala:345) | |
at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24) | |
at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24) | |
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40) | |
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:397) | |
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) | |
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) | |
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) | |
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment