forked from asean-rssa/tf_azure_deployment
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
revise init script to use secrets from env var
- Loading branch information
Showing 1 changed file with 23 additions and 12 deletions.
There are no files selected for viewing
35 changes: 23 additions & 12 deletions
35
adb-external-hive-metastore/initscripts/external_metastore_init.sh
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,20 +1,31 @@ | ||
#!/bin/bash
#
# Databricks cluster init script: point Spark at an external Hive metastore.
#
# Secrets are exposed to init scripts as environment variables (loaded from
# /etc/environment); copy them into script-local variables before expanding
# them into the generated Spark conf.
source /etc/environment

# NOTE(review): the previous revision assigned the credential to $USER, which
# clobbers the standard USER environment variable for the rest of the script.
# Use a METASTORE_ prefix instead; the generated conf file is identical.
METASTORE_PASSWORD="$HIVE_PASSWORD"
METASTORE_URL="$HIVE_URL"
METASTORE_USER="$HIVE_USER"

# The heredoc delimiter is deliberately UNQUOTED so the $METASTORE_* variables
# above are interpolated into the conf file. (A quoted delimiter, e.g.
# <<'EOF', would disable interpolation and write the literal variable names.)
# NOTE(review): '>>' appends — fine on a fresh cluster container, but if this
# ever runs twice against a persisted path it duplicates the [driver] block;
# confirm whether '>' (truncate) is intended.
cat << EOF >>/databricks/driver/conf/00-custom-spark.conf
[driver] {
# Hive specific configuration options.
# spark.hadoop prefix is added to make sure these Hive specific options will propagate to the metastore client.
# JDBC connect string for a JDBC metastore
"spark.hadoop.javax.jdo.option.ConnectionURL"="$METASTORE_URL"
# Username to use against metastore database
"spark.hadoop.javax.jdo.option.ConnectionUserName"="$METASTORE_USER"
# Password to use against metastore database
"spark.hadoop.javax.jdo.option.ConnectionPassword"="$METASTORE_PASSWORD"
# Driver class name for a JDBC metastore
"spark.hadoop.javax.jdo.option.ConnectionDriverName" = "com.microsoft.sqlserver.jdbc.SQLServerDriver"
# Spark specific configuration options
"spark.sql.hive.metastore.version" = "3.1.0"
# Skip this one if <hive-version> is 0.13.x.
"spark.sql.hive.metastore.jars" = "/dbfs/tmp/hive/3-1-0/lib/*"
"spark.hadoop.metastore.catalog.default" = "hive"
"spark.databricks.delta.preview.enabled" = "true"
"datanucleus.fixedDatastore" = "true"
"datanucleus.autoCreateSchema" = "false"
}
EOF