Skip to content

Commit

Permalink
revise init script to use secrets from env var
Browse files Browse the repository at this point in the history
  • Loading branch information
hwang-db committed Jun 22, 2022
1 parent b0012b0 commit 0965053
Showing 1 changed file with 23 additions and 12 deletions.
35 changes: 23 additions & 12 deletions adb-external-hive-metastore/initscripts/external_metastore_init.sh
Original file line number Diff line number Diff line change
@@ -1,20 +1,31 @@
#!/bin/bash
# Cluster init script for attaching Databricks clusters to an external Hive
# metastore. Lives at dbfs:/scripts/external_metastore_init.sh.
#
# The metastore credentials are supplied as Databricks secrets exposed through
# the cluster's environment (HIVE_URL, HIVE_USER, HIVE_PASSWORD); sourcing
# /etc/environment makes them visible inside this init script.
source /etc/environment

# Copy the secrets into script-local names. HMS_* is used instead of bare
# USER/PASSWORD so we do not clobber the standard $USER environment variable.
# ${VAR:?} fails fast with a clear message if a secret is missing, instead of
# silently writing empty credentials into the Spark conf.
HMS_URL="${HIVE_URL:?HIVE_URL is not set in /etc/environment}"
HMS_USER="${HIVE_USER:?HIVE_USER is not set in /etc/environment}"
HMS_PASSWORD="${HIVE_PASSWORD:?HIVE_PASSWORD is not set in /etc/environment}"

# NOTE: the heredoc delimiter is intentionally UNQUOTED so the $HMS_* values
# above are interpolated into the generated config file. Appending (>>) keeps
# any settings already present in 00-custom-spark.conf.
cat << EOF >> /databricks/driver/conf/00-custom-spark.conf
[driver] {
  # Hive specific configuration options.
  # The spark.hadoop prefix makes these Hive options propagate to the metastore client.
  # JDBC connect string for a JDBC metastore
  "spark.hadoop.javax.jdo.option.ConnectionURL" = "$HMS_URL"
  # Username to use against metastore database
  "spark.hadoop.javax.jdo.option.ConnectionUserName" = "$HMS_USER"
  # Password to use against metastore database
  "spark.hadoop.javax.jdo.option.ConnectionPassword" = "$HMS_PASSWORD"
  # Driver class name for a JDBC metastore
  "spark.hadoop.javax.jdo.option.ConnectionDriverName" = "com.microsoft.sqlserver.jdbc.SQLServerDriver"
  # Spark specific configuration options
  "spark.sql.hive.metastore.version" = "3.1.0"
  # Skip this one if <hive-version> is 0.13.x.
  "spark.sql.hive.metastore.jars" = "/dbfs/tmp/hive/3-1-0/lib/*"
  "spark.hadoop.metastore.catalog.default" = "hive"
  "spark.databricks.delta.preview.enabled" = "true"
  "datanucleus.fixedDatastore" = "true"
  "datanucleus.autoCreateSchema" = "false"
}
EOF

0 comments on commit 0965053

Please sign in to comment.