-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path setup-spark.sh
More file actions
74 lines (53 loc) · 3.18 KB
/
setup-spark.sh
File metadata and controls
74 lines (53 loc) · 3.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
#!/bin/bash
# setup-spark.sh — install Apache Spark (the "without-hadoop" distribution)
# and wire it into an existing Hadoop/Hive installation.
# Expects ./init.sh to export: SPARK_DIR, DOWNLOAD_DIR, SPARK_V,
# SPARK_SCALA_V, SPARK_HOME, INSTALL_DIR, SCALA_V, HADOOP_HOME, HIVE_HOME.
set -e
. ./init.sh
# Start from a clean extraction directory.
# BUG FIX: the original ran mkdir only when $SPARK_DIR already existed, so a
# fresh machine never got the directory and the tar -C below failed. Remove
# any stale tree first, then always (re)create it.
if [ -d "$SPARK_DIR" ]; then
  rm -rf -- "${SPARK_DIR:?}"   # :? aborts rather than rm -rf an empty path
fi
mkdir -p -- "$SPARK_DIR"
#wget https://downloads.apache.org/spark/spark-$SPARK_V/spark-$SPARK_V-bin-hadoop$SPARK_HADOOP_V.tgz -P $DOWNLOAD_DIR/
tar -xzf "$DOWNLOAD_DIR/spark-$SPARK_V-bin-without-hadoop$SPARK_SCALA_V.tgz" -C "$SPARK_DIR"
# MySQL connector jar for connecting to the Hive metastore.
# --- works in ubuntu 18.04 only ---
# sudo apt install -y libmysql-java
# sudo ln -s /usr/share/java/mysql-connector-java.jar $SPARK_HOME/jars/mysql-connector-java.jar
# --- should work universally ---
# Derive the connector version from the installed mysql client's banner,
# e.g. "mysql  Ver 8.0.33-0ubuntu0..." -> "8.0.33".
MYSQL_VERSION=$(mysql --version | cut -d ' ' -f 4 | cut -d '-' -f 1)
# -f makes re-runs idempotent: a plain ln -s fails (and, under set -e,
# aborts the whole script) when the link already exists.
ln -sf "$INSTALL_DIR/opt/mysql-connector-java-$MYSQL_VERSION.jar" \
  "$SPARK_HOME/jars/mysql-connector-java.jar"
# For some reason the spark-hive jar doesn't ship with the spark
# distribution, so download it from Maven Central.
# --fail: without it curl exits 0 on HTTP 404/5xx and saves the server's
# error page as the "jar", which only blows up much later inside Spark.
curl --fail --progress-bar -L \
  "https://repo1.maven.org/maven2/org/apache/spark/spark-hive_${SCALA_V}/${SPARK_V}/spark-hive_${SCALA_V}-${SPARK_V}.jar" \
  --output "${SPARK_HOME}/jars/spark-hive_${SCALA_V}-${SPARK_V}.jar"
# Config: inject the current user into spark-defaults.conf from the template.
# Render the template straight into Spark's conf dir instead of the original
# cp / sed -i / cp / rm dance — one step, and no mutated file is left behind
# in ./spark_conf on failure. ('%' as the sed delimiter keeps any '/' in the
# substitution safe; behavior is unchanged.)
sed "s%user_name%$USER%g" ./spark_conf/spark-defaults.conf.template \
  > "$SPARK_HOME/conf/spark-defaults.conf"
cp ./hive_conf/hive-site.xml.template "$SPARK_HOME/conf/hive-site.xml"
cp "$SPARK_HOME/conf/spark-env.sh.template" "$SPARK_HOME/conf/spark-env.sh"
# NOTE(review): JAVA_HOME is assigned from $JAVA_V, which by its name looks
# like a version string rather than an installation path — confirm against
# init.sh; kept as-is to preserve behavior.
echo "export JAVA_HOME=$JAVA_V" >> "$SPARK_HOME/conf/spark-env.sh"
#echo "export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop" >> $SPARK_HOME/conf/spark-env.sh
#HADOOP_CONF_DIR
# 'hadoop classpath' prints the full Hadoop classpath; the expanded result is
# baked into spark-env.sh so Spark can run against the external Hadoop jars.
echo "export SPARK_DIST_CLASSPATH=$("$HADOOP_HOME/bin/hadoop" classpath)" >> "$SPARK_HOME/conf/spark-env.sh"
# Register Spark's location with Hive (needed for Hive-on-Spark).
echo "SPARK_HOME=$SPARK_HOME" >> $HIVE_HOME/conf/hive-env.sh
# give spark jars to hive (for hive on spark)
# Append a snippet to hive-env.sh that runs at Hive startup: it builds a
# comma-separated list of every jar in $SPARK_HOME/jars — skipping slf4j and
# mysql jars to avoid classpath conflicts — strips the leading comma
# (${SPARK_JARS#?}), and exports the result as HIVE_AUX_JARS_PATH.
# The 'EOT' delimiter is quoted, so nothing below is expanded here: the
# snippet is written verbatim and evaluated later by hive-env.sh itself.
cat <<'EOT' >> $HIVE_HOME/conf/hive-env.sh
SPARK_JARS=""
for jar in `ls $SPARK_HOME/jars`; do
if ! echo $jar | grep -q 'slf4j\|mysql'; then
SPARK_JARS=$SPARK_JARS,$SPARK_HOME/jars/$jar
fi
done
VAR=${SPARK_JARS#?};
export HIVE_AUX_JARS_PATH=$VAR
echo $HIVE_AUX_JARS_PATH
EOT
#cat $HIVE_DIR/apache-hive-$HIVE_V-bin/conf/hive-env.sh
# ln -s $SPARK_DIR/spark-$SPARK_V-bin-hadoop$SPARK_HADOOP_V/jars/spark-network-common_2.11-2.2.0.jar /usr/local/hive/apache-hive-2.3.0-bin/lib/spark-network-common_2.11-2.2.0.jar
# ln -s $SPARK_DIR/spark-$SPARK_V-bin-hadoop$SPARK_HADOOP_V/jars/spark-core_2.11-2.2.0.jar /usr/local/hive/apache-hive-2.3.0-bin/lib/spark-core_2.11-2.2.0.jar
# ln -s $SPARK_DIR/spark-$SPARK_V-bin-hadoop$SPARK_HADOOP_V/jars/scala-library-2.11.8.jar /usr/local/hive/apache-hive-2.3.0-bin/lib/scala-library-2.11.8.jar
# start-master.sh
# start-worker.sh
# start-slave.sh
#jps