diff --git a/samples/computed-col-test-cases/computed-col-test.ipynb b/samples/computed-col-test-cases/computed-col-test.ipynb index a6498627..2cb07ee2 100644 --- a/samples/computed-col-test-cases/computed-col-test.ipynb +++ b/samples/computed-col-test-cases/computed-col-test.ipynb @@ -13,6 +13,12 @@ "version": 3 }, "pygments_lexer": "python3" + }, + "extensions": { + "azuredatastudio": { + "version": 1, + "views": [] + } } }, "nbformat_minor": 2, @@ -24,64 +30,67 @@ "# spark = SparkSession.builder.getOrCreate()\r\n", "# sc.setLogLevel(\"INFO\")\r\n", "spark.sparkContext.setLogLevel('DEBUG')\r\n", - "data = [(1, \"2020\", \"01\"),\r\n", - " (2, \"2020\", \"02\"),\r\n", - " (3, \"2020\", \"03\")]\r\n", - "columns = [\"Id\", \"Year\", \"Month\"]\r\n", - "df = spark.createDataFrame(data = data, schema = columns)\r\n", - "df.show()" + "spark.conf.set('spark.sql.caseSensitive', True)\r\n", + "\r\n", + "servername = \"jdbc:sqlserver://master-0.master-svc\"\r\n", + "dbname = \"connector_test_db\"\r\n", + "url = servername + \";\" + \"databaseName=\" + dbname + \";\"\r\n", + "\r\n", + "user = \"connector_user1\"\r\n", + "password = \"password123!#\" # Please specify password here" ], "metadata": { "azdata_cell_guid": "fd51c6bb-ca56-47f7-b8ed-62baad1bd782", - "tags": [] + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "stream", "name": "stdout", - "text": "Starting Spark application\n", - "output_type": "stream" + "text": "Starting Spark application\n" }, { + "output_type": "display_data", "data": { "text/plain": "", - "text/html": "\n
IDYARN Application IDKindStateSpark UIDriver logCurrent session?
3application_1617815319900_0008pysparkidleLinkLink
" + "text/html": "\n
IDYARN Application IDKindStateSpark UIDriver logCurrent session?
6application_1663974487519_0013pysparkidleLinkLink
" }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "38d9bc1016e047b9b974ccaab4f34ad8" + "model_id": "9cf85570cd994e8a8545e85a0aa2573b" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "SparkSession available as 'spark'.\n", - "output_type": "stream" + "text": "SparkSession available as 'spark'.\n" }, { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "fec6769d00164cb5a00c6f72cf40cf5b" + "model_id": "0339dfbf3823410fb06a568e4816cb8a" } }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "text": "+---+----+-----+\n| Id|Year|Month|\n+---+----+-----+\n| 1|2020| 01|\n| 2|2020| 02|\n| 3|2020| 03|\n+---+----+-----+", - "output_type": "stream" + "metadata": {} } ], "execution_count": 2 @@ -89,38 +98,79 @@ { "cell_type": "code", "source": [ - "servername = \"jdbc:sqlserver://master-0.master-svc\"\r\n", - "dbname = \"connector_test_db1\"\r\n", - "url = servername + \";\" + \"databaseName=\" + dbname + \";\"\r\n", + "from pyspark.sql.types import StructType, StructField, StringType, IntegerType\r\n", "\r\n", - "user = \"connector_user1\"\r\n", - "password = \"password123!#\" # Please specify password here" + "data = [(1, \"2020\", \"01\"),\r\n", + " (2, \"2020\", \"02\"),\r\n", + " (3, \"2020\", \"03\")]\r\n", + "\r\n", + "columns = StructType([\r\n", + " StructField(\"Id\", IntegerType(), nullable = False),\r\n", + " StructField(\"Year\", 
StringType(), nullable = True),\r\n", + " StructField(\"Month\", StringType(), nullable = True)\r\n", + "])\r\n", + "\r\n", + "df = spark.createDataFrame(data = data, schema = columns)\r\n", + "df.show()\r\n", + "df.printSchema()\r\n", + "\r\n", + "# posts_data dataframe is for Graph table\r\n", + "posts_data = [(1,'Intro','Hi There This is ABC'),\r\n", + " (2,'Intro','Hello I''m PQR'),\r\n", + " (3,'Re: Intro','Hey PQR This is XYZ'),\r\n", + " (4,'Geography','Im George from USA'),\r\n", + " (5,'Re:Geography','I''m Mary from OZ'),\r\n", + " (6,'Re:Geography','I''m Peter from UK')]\r\n", + "posts_columns = [\"PostID\", \"PostTitle\", \"PostBody\"]\r\n", + "posts_df = spark.createDataFrame(data = posts_data, schema = posts_columns)\r\n", + "posts_df.show()" ], "metadata": { - "azdata_cell_guid": "fdc8b9c8-36eb-4280-b32e-6a95b0e11e45" + "azdata_cell_guid": "fdc8b9c8-36eb-4280-b32e-6a95b0e11e45", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "802063b9be4e4abf97c8fc2b84789b32" + "model_id": "e34c302c9dc24579ba6ba1dfbf2cb8ad" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "+---+----+-----+\n| Id|Year|Month|\n+---+----+-----+\n| 1|2020| 01|\n| 2|2020| 02|\n| 3|2020| 03|\n+---+----+-----+\n\nroot\n |-- Id: integer (nullable = false)\n |-- Year: string (nullable = true)\n |-- Month: string (nullable = true)\n\n+------+------------+--------------------+\n|PostID| PostTitle| PostBody|\n+------+------------+--------------------+\n| 1| Intro|Hi There This is ABC|\n| 2| Intro| Hello Im PQR|\n| 3| Re: Intro| Hey PQR This is XYZ|\n| 4| Geography| Im George from 
USA|\n| 5|Re:Geography| Im Mary from OZ|\n| 6|Re:Geography| Im Peter from UK|\n+------+------------+--------------------+" } ], "execution_count": 3 }, + { + "cell_type": "markdown", + "source": [ + "### **1\\. Write to tables with computed columns**" + ], + "metadata": { + "azdata_cell_guid": "9bd52c1c-fe0c-4f4c-8726-023f3ff642a6" + }, + "attachments": {} + }, { "cell_type": "code", "source": [ "## append with computed columns at start and end\r\n", "# Write from Spark to SQL table using MSSQL Spark Connector\r\n", - "dbtable = \"test1\"\r\n", + "dbtable = \"test_computed_col_1\"\r\n", "\r\n", "try:\r\n", " df.write \\\r\n", @@ -130,6 +180,7 @@ " .option(\"dbtable\", dbtable) \\\r\n", " .option(\"user\", user) \\\r\n", " .option(\"password\", password) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year, Month\") \\\r\n", " .save()\r\n", "except ValueError as error :\r\n", " print(\"MSSQL Connector write failed\", error)\r\n", @@ -137,34 +188,40 @@ "print(\"MSSQL Connector write(append) succeeded \")" ], "metadata": { - "azdata_cell_guid": "2f936a04-6110-47d7-ab25-87308576ab57" + "azdata_cell_guid": "2f936a04-6110-47d7-ab25-87308576ab57", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "4ebad3e502dd48a2924e8762da7de964" + "model_id": "559afaa5815f4ed589d7824b5afc1037" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "MSSQL Connector write(append) succeeded", - "output_type": "stream" + "text": "MSSQL Connector write(append) succeeded" } ], - "execution_count": 25 + "execution_count": 5 }, { "cell_type": "code", "source": [ "#Read from SQL table using MSSQ 
Connector\r\n", - "dbtable = \"test1\"\r\n", + "dbtable = \"test_computed_col_1\"\r\n", "\r\n", "jdbcDF = spark.read \\\r\n", " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", @@ -176,35 +233,41 @@ "jdbcDF.show(5)" ], "metadata": { - "azdata_cell_guid": "f957925e-2d9a-4cf0-a610-28fcf1e746ba" + "azdata_cell_guid": "f957925e-2d9a-4cf0-a610-28fcf1e746ba", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "2fd8cdda3c284875944c0399527cd36a" + "model_id": "a96f82f316e34222af4e04d15beba91a" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "+-------+---+----+-----+--------+\n| Date| Id|Year|Month| Years|\n+-------+---+----+-----+--------+\n|2020-01| 1|2020| 01|20202020|\n|2020-02| 2|2020| 02|20202020|\n|2020-03| 3|2020| 03|20202020|\n+-------+---+----+-----+--------+", - "output_type": "stream" + "text": "+-------+---+----+-----+--------+\n| Date| Id|Year|Month| Years|\n+-------+---+----+-----+--------+\n|2020-02| 2|2020| 02|20202020|\n|2020-03| 3|2020| 03|20202020|\n|2020-01| 1|2020| 01|20202020|\n+-------+---+----+-----+--------+" } ], - "execution_count": 26 + "execution_count": 6 }, { "cell_type": "code", "source": [ "## append with 1 computed column in between\r\n", "# Write from Spark to SQL table using MSSQL Spark Connector\r\n", - "dbtable = \"test2\"\r\n", + "dbtable = \"test_computed_col_2\"\r\n", "\r\n", "try:\r\n", " df.write \\\r\n", @@ -214,6 +277,7 @@ " .option(\"dbtable\", dbtable) \\\r\n", " .option(\"user\", user) \\\r\n", " .option(\"password\", password) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year, Month\") \\\r\n", " .save()\r\n", 
"except ValueError as error :\r\n", " print(\"MSSQL Connector write failed\", error)\r\n", @@ -222,34 +286,40 @@ ], "metadata": { "azdata_cell_guid": "f51da718-8527-45c0-a739-b0e64af4e7a6", - "tags": [] + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "aa66d89ae91e464989173d8c25012168" + "model_id": "cb2c59ccf8f741958f3f986811fa6df1" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "MSSQL Connector write(append) succeeded", - "output_type": "stream" + "text": "MSSQL Connector write(append) succeeded" } ], - "execution_count": 23 + "execution_count": 7 }, { "cell_type": "code", "source": [ "#Read from SQL table using MSSQL Connector\r\n", - "dbtable = \"test2\"\r\n", + "dbtable = \"test_computed_col_2\"\r\n", "\r\n", "jdbcDF = spark.read \\\r\n", " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", @@ -261,35 +331,41 @@ "jdbcDF.show(5)" ], "metadata": { - "azdata_cell_guid": "bd1b3ccd-3ed7-4b21-9b57-95d1d774d76e" + "azdata_cell_guid": "bd1b3ccd-3ed7-4b21-9b57-95d1d774d76e", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "3ce2396102ee419192cd8f41c06ea044" + "model_id": "665800375fd94420b898b52ba2cfa4e3" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": 
"stdout", - "text": "+---+-------+----+-----+\n| Id| Date|Year|Month|\n+---+-------+----+-----+\n| 1|2020-01|2020| 01|\n| 2|2020-02|2020| 02|\n| 3|2020-03|2020| 03|\n+---+-------+----+-----+", - "output_type": "stream" + "text": "+---+-------+----+-----+\n| Id| Date|Year|Month|\n+---+-------+----+-----+\n| 1|2020-01|2020| 01|\n| 2|2020-02|2020| 02|\n| 3|2020-03|2020| 03|\n+---+-------+----+-----+" } ], - "execution_count": 24 + "execution_count": 8 }, { "cell_type": "code", "source": [ "## append with 2 computed columns in between\r\n", "# Write from Spark to SQL table using MSSQL Spark Connector\r\n", - "dbtable = \"test3\"\r\n", + "dbtable = \"test_computed_col_3\"\r\n", "\r\n", "try:\r\n", " df.write \\\r\n", @@ -299,7 +375,7 @@ " .option(\"dbtable\", dbtable) \\\r\n", " .option(\"user\", user) \\\r\n", " .option(\"password\", password) \\\r\n", - " .option(\"schemaCheckEnabled\", False) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year, Month\") \\\r\n", " .save()\r\n", "except ValueError as error :\r\n", " print(\"MSSQL Connector write failed\", error)\r\n", @@ -308,34 +384,40 @@ ], "metadata": { "azdata_cell_guid": "10bbf920-e50f-4098-be1b-f06024a52fac", - "tags": [] + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "2967ac7f14e34d27970e0833b7eb5645" + "model_id": "dccd78bd9d9e44468d1f0e017e4cfbe4" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "MSSQL Connector write(append) done", - "output_type": "stream" + "text": "MSSQL Connector write(append) done" } ], - "execution_count": 21 + "execution_count": 9 }, { "cell_type": "code", "source": 
[ "#Read from SQL table using MSSQL Connector\r\n", - "dbtable = \"test3\"\r\n", + "dbtable = \"test_computed_col_3\"\r\n", "\r\n", "jdbcDF = spark.read \\\r\n", " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", @@ -348,36 +430,41 @@ ], "metadata": { "azdata_cell_guid": "1c81ee7d-62bc-4a4d-acfa-900b6efeb6ec", - "tags": [] + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "c610b609efaa4d27b53e1a48a417888e" + "model_id": "1e8b69a61aec4538bae15667f76e9976" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "+---+-------+----+--------+-----+\n| Id| Date|Year| Years|Month|\n+---+-------+----+--------+-----+\n| 1|2020-01|2020|20202020| 01|\n| 2|2020-02|2020|20202020| 02|\n| 3|2020-03|2020|20202020| 03|\n+---+-------+----+--------+-----+", - "output_type": "stream" + "text": "+---+-------+----+--------+-----+\n| Id| Date|Year| Years|Month|\n+---+-------+----+--------+-----+\n| 1|2020-01|2020|20202020| 01|\n| 2|2020-02|2020|20202020| 02|\n| 3|2020-03|2020|20202020| 03|\n+---+-------+----+--------+-----+" } ], - "execution_count": 22 + "execution_count": 10 }, { "cell_type": "code", "source": [ "## append with 1 computed column in df and table, but table has 1 less col\r\n", - "# set schemaCheckEnabled\" as False\r\n", "# Write from Spark to SQL table using MSSQL Spark Connector\r\n", - "dbtable = \"test4\"\r\n", + "dbtable = \"test_computed_col_4\"\r\n", "\r\n", "try:\r\n", " df.write \\\r\n", @@ -387,7 +474,7 @@ " .option(\"dbtable\", dbtable) \\\r\n", " .option(\"user\", user) \\\r\n", " .option(\"password\", password) \\\r\n", - " 
.option(\"schemaCheckEnabled\", False) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year\") \\\r\n", " .save()\r\n", "except ValueError as error :\r\n", " print(\"MSSQL Connector write failed\", error)\r\n", @@ -395,34 +482,40 @@ "print(\"MSSQL Connector write(append) done \")" ], "metadata": { - "azdata_cell_guid": "0fcd38ab-a367-45e7-8d80-acb978176c82" + "azdata_cell_guid": "0fcd38ab-a367-45e7-8d80-acb978176c82", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": "98bb7592e1b24920909dd4ed6542737a" + "model_id": "616166dc39d842f4bee396b55934e857" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "MSSQL Connector write(append) done", - "output_type": "stream" + "text": "MSSQL Connector write(append) done" } ], - "execution_count": 19 + "execution_count": 11 }, { "cell_type": "code", "source": [ "#Read from SQL table using MSSQL Connector\r\n", - "dbtable = \"test4\"\r\n", + "dbtable = \"test_computed_col_4\"\r\n", "\r\n", "jdbcDF = spark.read \\\r\n", " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", @@ -434,28 +527,518 @@ "jdbcDF.show(5)" ], "metadata": { - "azdata_cell_guid": "bb9add17-d962-4e9d-b407-cf1d43cbaf98" + "azdata_cell_guid": "bb9add17-d962-4e9d-b407-cf1d43cbaf98", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "python" }, "outputs": [ { + "output_type": "display_data", "data": { "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", "application/vnd.jupyter.widget-view+json": { "version_major": 2, "version_minor": 0, - "model_id": 
"f7300341de024a018ddad006b91624fc" + "model_id": "2fd506b5123d4e7cbedf6e04eb82e7cb" } }, - "metadata": {}, - "output_type": "display_data" + "metadata": {} }, { + "output_type": "stream", "name": "stdout", - "text": "+---+----+--------+\n| Id|Year| Years|\n+---+----+--------+\n| 1|2020|20202020|\n| 2|2020|20202020|\n| 3|2020|20202020|\n+---+----+--------+", - "output_type": "stream" + "text": "+---+----+--------+\n| Id|Year| Years|\n+---+----+--------+\n| 2|2020|20202020|\n| 3|2020|20202020|\n| 1|2020|20202020|\n+---+----+--------+" + } + ], + "execution_count": 12 + }, + { + "cell_type": "markdown", + "source": [ + "### **2\\. Write to Temporal Tables (Generated Always)**" + ], + "metadata": { + "azdata_cell_guid": "1ce0f440-3209-4ee2-9bb0-0fa82f0c55bb" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "# temporal table: Write from Spark to SQL table using MSSQL Spark Connector\r\n", + "dbtable = \"dbo.tempTest\"\r\n", + "\r\n", + "try:\r\n", + " df.write \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .mode(\"append\") \\\r\n", + " .option(\"truncate\", \"true\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password) \\\r\n", + " .option(\"tableLock\",True) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year, Month\") \\\r\n", + " .save()\r\n", + "except ValueError as error :\r\n", + " print(\"MSSQL Connector write failed\", error)\r\n", + "\r\n", + "print(\"MSSQL Connector write(append) succeeded \")\r\n", + "\r\n", + " #.option(\"schemaCheckEnabled\", False) \\" + ], + "metadata": { + "azdata_cell_guid": "fa16ca86-1837-4613-9650-78eed59eef3e", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { 
+ "version_major": 2, + "version_minor": 0, + "model_id": "c110870b4a7f4b60a7b13e19cb0ac37a" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "MSSQL Connector write(append) succeeded" + } + ], + "execution_count": 14 + }, + { + "cell_type": "code", + "source": [ + "#Read from SQL table using MSSQ Connector\r\n", + "dbtable = \"dbo.tempTest\"\r\n", + "print(\"read data from SQL server table \")\r\n", + "jdbcDF = spark.read \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password).load()\r\n", + "\r\n", + "jdbcDF.show(truncate = False)" + ], + "metadata": { + "azdata_cell_guid": "dcaa3a1b-4770-4a95-80ea-d3e193091ca0", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "e91c9d4d9a81439a927dd4cc8afd4310" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "read data from SQL server table \n+---+----+-----+--------------------------+--------------------------+\n|Id |Year|Month|ValidFrom |ValidTo |\n+---+----+-----+--------------------------+--------------------------+\n|1 |2020|01 |2022-10-26 06:49:53.320574|9999-12-31 23:59:59.999999|\n|2 |2020|02 |2022-10-26 06:49:53.332836|9999-12-31 23:59:59.999999|\n|3 |2020|03 |2022-10-26 06:49:53.332836|9999-12-31 23:59:59.999999|\n+---+----+-----+--------------------------+--------------------------+" + } + ], + "execution_count": 15 + }, + { + "cell_type": "code", + "source": [ + "# temporal table with computed columns: Write from Spark to SQL table using MSSQL Spark Connector\r\n", + "dbtable = 
\"test_computed_temp\"\r\n", + "\r\n", + "try:\r\n", + " df.write \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .mode(\"append\") \\\r\n", + " .option(\"truncate\", \"true\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password) \\\r\n", + " .option(\"tableLock\",True) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year, Month\") \\\r\n", + " .option(\"schemaCheckEnabled\", False) \\\r\n", + " .save()\r\n", + "except ValueError as error :\r\n", + " print(\"MSSQL Connector write failed\", error)\r\n", + "\r\n", + "print(\"MSSQL Connector write(append) succeeded \")" + ], + "metadata": { + "azdata_cell_guid": "1283627e-074c-4ff5-8752-b4cdf88b27f3", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "3ef61c42d4d2462cba12b5cc87f5313a" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "MSSQL Connector write(append) succeeded" + } + ], + "execution_count": 16 + }, + { + "cell_type": "code", + "source": [ + "#Read from SQL table using MSSQ Connector\r\n", + "dbtable = \"test_computed_temp\"\r\n", + "print(\"read data from SQL server table \")\r\n", + "jdbcDF = spark.read \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password).load()\r\n", + "\r\n", + "jdbcDF.show(truncate = False)" + ], + "metadata": { + "azdata_cell_guid": "982598ad-2c64-4ee7-9ddf-6e1ae6f0d2c5", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + 
"text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "3089a3ff1367419d848523040b5ba510" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "read data from SQL server table \n+-------+---+----+-----+--------+--------------------------+--------------------------+\n|Date |Id |Year|Month|Years |ValidFrom |ValidTo |\n+-------+---+----+-----+--------+--------------------------+--------------------------+\n|2020-01|1 |2020|01 |20202020|2022-10-26 06:50:00.130968|9999-12-31 23:59:59.999999|\n|2020-02|2 |2020|02 |20202020|2022-10-26 06:50:00.143209|9999-12-31 23:59:59.999999|\n|2020-03|3 |2020|03 |20202020|2022-10-26 06:50:00.143209|9999-12-31 23:59:59.999999|\n+-------+---+----+-----+--------+--------------------------+--------------------------+" + } + ], + "execution_count": 17 + }, + { + "cell_type": "markdown", + "source": [ + "### **3\\. 
Write to Graph Table**" + ], + "metadata": { + "azdata_cell_guid": "6ccbf927-2f99-44a1-a6d6-21510dbde904" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "# Graph table: Write from Spark to SQL table using MSSQL Spark Connector\r\n", + "dbtable = \"dbo.ForumPosts\"\r\n", + "\r\n", + "try:\r\n", + " posts_df.write \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .mode(\"append\") \\\r\n", + " .option(\"truncate\", \"true\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password) \\\r\n", + " .option(\"tableLock\",True) \\\r\n", + " .option(\"columnsToWrite\", \"PostID, PostTitle, PostBody\") \\\r\n", + " .save()\r\n", + "except ValueError as error :\r\n", + " print(\"MSSQL Connector write failed\", error)\r\n", + "\r\n", + "print(\"MSSQL Connector write(append) succeeded \")" + ], + "metadata": { + "azdata_cell_guid": "1469a88a-58b9-4007-8616-e9c594176b1d", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "8b84231f2eef40128a412e07489114d1" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "MSSQL Connector write(append) succeeded" + } + ], + "execution_count": 18 + }, + { + "cell_type": "code", + "source": [ + "#Read from SQL table using MSSQ Connector\r\n", + "dbtable = \"dbo.ForumPosts\"\r\n", + "print(\"read data from SQL server table \")\r\n", + "jdbcDF = spark.read \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password).load()\r\n", 
+ "\r\n", + "jdbcDF.show(5)" + ], + "metadata": { + "azdata_cell_guid": "3095ba6d-e02f-4a0a-95d1-3e13be55fe20", + "tags": [], + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "4997b53c5a3448968645ff6ddfa02164" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "read data from SQL server table \n+-----------------------------------------+------+------------+--------------------+\n|$node_id_1AEAB501C3FA43E098D64CFA11B7ABC8|PostID| PostTitle| PostBody|\n+-----------------------------------------+------+------------+--------------------+\n| {\"type\":\"node\",\"s...| 1| Intro|Hi There This is ABC|\n| {\"type\":\"node\",\"s...| 2| Intro| Hello Im PQR|\n| {\"type\":\"node\",\"s...| 3| Re: Intro| Hey PQR This is XYZ|\n| {\"type\":\"node\",\"s...| 4| Geography| Im George from USA|\n| {\"type\":\"node\",\"s...| 5|Re:Geography| Im Mary from OZ|\n+-----------------------------------------+------+------------+--------------------+\nonly showing top 5 rows" + } + ], + "execution_count": 19 + }, + { + "cell_type": "markdown", + "source": [ + "### **4\\. 
Write to Tables with Identity columns**" + ], + "metadata": { + "azdata_cell_guid": "3fd6f1f2-cac4-42c1-a3c0-118de963d6ac" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "# auto increment columns: Write from Spark to SQL table using MSSQL Spark Connector\r\n", + "dbtable = \"test_identity\"\r\n", + "\r\n", + "try:\r\n", + " df.write \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .mode(\"append\") \\\r\n", + " .option(\"truncate\", \"true\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password) \\\r\n", + " .option(\"tableLock\",True) \\\r\n", + " .option(\"columnsToWrite\", \"Year, Month\") \\\r\n", + " .save()\r\n", + "except ValueError as error :\r\n", + " print(\"MSSQL Connector write failed\", error)\r\n", + "\r\n", + "print(\"MSSQL Connector write(append) succeeded \")" + ], + "metadata": { + "azdata_cell_guid": "99bf84bc-c6bb-43a9-aa6b-9f3387cbcae1", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "6c0b5dd47806439ea62b29299c0cd768" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "MSSQL Connector write(append) succeeded" } ], "execution_count": 20 + }, + { + "cell_type": "code", + "source": [ + "#Read from SQL table using MSSQ Connector\r\n", + "dbtable = \"test_identity\"\r\n", + "print(\"read data from SQL server table \")\r\n", + "jdbcDF = spark.read \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password).load()\r\n", + 
"\r\n", + "jdbcDF.show(5)" + ], + "metadata": { + "azdata_cell_guid": "d477c6a5-4e69-4006-8742-786b41fb4031", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "4d41be9ff5714f4cb114d0d67708ce77" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "read data from SQL server table \n+---+----+-----+--------+\n| Id|Year|Month| Years|\n+---+----+-----+--------+\n| 1|2020| 02|20202020|\n| 2|2020| 03|20202020|\n| 3|2020| 01|20202020|\n+---+----+-----+--------+" + } + ], + "execution_count": 21 + }, + { + "cell_type": "markdown", + "source": [ + "### **5\\. Write to Table skipping some non-nullable columns**" + ], + "metadata": { + "azdata_cell_guid": "340e9ce8-eabb-4cd5-95e9-ff5b3e6b5b46" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "# skip non-nullable column: Write from Spark to SQL table using MSSQL Spark Connector\r\n", + "dbtable = \"test_skip_cols\"\r\n", + "\r\n", + "try:\r\n", + " df.write \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .mode(\"append\") \\\r\n", + " .option(\"truncate\", \"true\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password) \\\r\n", + " .option(\"tableLock\",True) \\\r\n", + " .option(\"columnsToWrite\", \"Id, Year\") \\\r\n", + " .save()\r\n", + "except ValueError as error :\r\n", + " print(\"MSSQL Connector write failed\", error)\r\n", + "\r\n", + "print(\"MSSQL Connector write(append) succeeded \")" + ], + "metadata": { + "azdata_cell_guid": "98ee8100-f43f-468b-af97-9024144f002c", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + 
"data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "058856628d3f4f3facd048ff1f79f7a7" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "MSSQL Connector write(append) succeeded" + } + ], + "execution_count": 24 + }, + { + "cell_type": "code", + "source": [ + "#Read from SQL table using MSSQL Connector\r\n", + "dbtable = \"test_skip_cols\"\r\n", + "print(\"read data from SQL server table \")\r\n", + "jdbcDF = spark.read \\\r\n", + " .format(\"com.microsoft.sqlserver.jdbc.spark\") \\\r\n", + " .option(\"url\", url) \\\r\n", + " .option(\"dbtable\", dbtable) \\\r\n", + " .option(\"user\", user) \\\r\n", + " .option(\"password\", password).load()\r\n", + "\r\n", + "jdbcDF.show(5)" + ], + "metadata": { + "azdata_cell_guid": "d687eb8f-435d-4ba9-b396-9e756185c4ca", + "language": "python" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'),…", + "application/vnd.jupyter.widget-view+json": { + "version_major": 2, + "version_minor": 0, + "model_id": "c2b18433ae964dd18f2b01969becc1d2" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": "read data from SQL server table \n+---+----+-----+\n| Id|Year|Month|\n+---+----+-----+\n| 2|2020| null|\n| 3|2020| null|\n| 1|2020| null|\n+---+----+-----+" + } + ], + "execution_count": 25 } ] } \ No newline at end of file diff --git a/samples/computed-col-test-cases/sql-table-generation.ipynb b/samples/computed-col-test-cases/sql-table-generation.ipynb index ae1abfa9..6a2f0202 100644 --- a/samples/computed-col-test-cases/sql-table-generation.ipynb +++ b/samples/computed-col-test-cases/sql-table-generation.ipynb @@ -8,6 +8,12 @@ 
"language_info": { "name": "sql", "version": "" + }, + "extensions": { + "azuredatastudio": { + "version": 1, + "views": [] + } } }, "nbformat_minor": 2, @@ -16,10 +22,16 @@ { "cell_type": "code", "source": [ - "CREATE DATABASE connector_test_db1" + "CREATE DATABASE connector_test_db" ], "metadata": { - "azdata_cell_guid": "5b1fd0a6-20b8-4e6a-9578-10bea2a3a43d" + "azdata_cell_guid": "5b1fd0a6-20b8-4e6a-9578-10bea2a3a43d", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "sql" }, "outputs": [ { @@ -32,17 +44,17 @@ { "output_type": "display_data", "data": { - "text/html": "Total execution time: 00:00:00.640" + "text/html": "Total execution time: 00:00:00.562" }, "metadata": {} } ], - "execution_count": 1 + "execution_count": 2 }, { "cell_type": "code", "source": [ - "Use connector_test_db1;\r\n", + "Use connector_test_db;\r\n", "CREATE LOGIN connector_user1 WITH PASSWORD ='password123!#' \r\n", "CREATE USER connector_user1 FROM LOGIN connector_user1\r\n", "\r\n", @@ -62,7 +74,13 @@ "ALTER ROLE [db_datawriter] ADD MEMBER connector_user1" ], "metadata": { - "azdata_cell_guid": "378bb099-8499-4661-9745-527a1ed19579" + "azdata_cell_guid": "378bb099-8499-4661-9745-527a1ed19579", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "sql" }, "outputs": [ { @@ -75,30 +93,46 @@ { "output_type": "display_data", "data": { - "text/html": "Total execution time: 00:00:00.108" + "text/html": "Total execution time: 00:00:00.036" }, "metadata": {} } ], - "execution_count": 2 + "execution_count": 3 + }, + { + "cell_type": "markdown", + "source": [ + "### **1\\. 
Tables with computed columns**" + ], + "metadata": { + "azdata_cell_guid": "70883f0f-b454-4c6c-957f-272397b77029" + }, + "attachments": {} }, { "cell_type": "code", "source": [ "-- append with computed columns at start and end\r\n", - "USE connector_test_db1;\r\n", + "USE connector_test_db;\r\n", "\r\n", - "CREATE TABLE test1\r\n", + "CREATE TABLE test_computed_col_1\r\n", "(\r\n", " Date As (Year + '-' + Month),\r\n", - " Id bigint,\r\n", + " Id int not null,\r\n", " Year nvarchar(4),\r\n", " Month nvarchar(2),\r\n", " Years AS REPLICATE(Year, 2)\r\n", ")" ], "metadata": { - "azdata_cell_guid": "dde73601-c253-45c8-b385-94da828ef6da" + "azdata_cell_guid": "dde73601-c253-45c8-b385-94da828ef6da", + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "sql" }, "outputs": [ { @@ -111,21 +145,21 @@ { "output_type": "display_data", "data": { - "text/html": "Total execution time: 00:00:00.090" + "text/html": "Total execution time: 00:00:00.019" }, "metadata": {} } ], - "execution_count": 10 + "execution_count": 14 }, { "cell_type": "code", "source": [ "-- append with 1 computed column in between\r\n", - "USE connector_test_db1;\r\n", - "CREATE TABLE test2\r\n", + "USE connector_test_db;\r\n", + "CREATE TABLE test_computed_col_2\r\n", "(\r\n", - " Id bigint,\r\n", + " Id int not null,\r\n", " Date As (Year + '-' + Month),\r\n", " Year nvarchar(4),\r\n", " Month nvarchar(2)\r\n", @@ -133,7 +167,13 @@ ], "metadata": { "azdata_cell_guid": "d25fe7a7-c8cf-498e-89f9-3335277a2db1", - "tags": [] + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "sql" }, "outputs": [ { @@ -146,22 +186,22 @@ { "output_type": "display_data", "data": { - "text/html": "Total execution time: 00:00:00.103" + "text/html": "Total execution time: 00:00:00.013" }, "metadata": {} } ], - "execution_count": 11 + "execution_count": 18 }, { "cell_type": "code", "source": [ "-- append with 2 computed columns in between\r\n", - "USE 
connector_test_db1;\r\n", + "USE connector_test_db;\r\n", "\r\n", - "CREATE TABLE test3\r\n", + "CREATE TABLE test_computed_col_3\r\n", "(\r\n", - " Id bigint,\r\n", + " Id int not null,\r\n", " Date As (Year + '-' + Month),\r\n", " Year nvarchar(4),\r\n", " Years AS REPLICATE(Year, 2),\r\n", @@ -170,7 +210,13 @@ ], "metadata": { "azdata_cell_guid": "2b952d30-7ca9-49d4-9cd8-92a35d30d881", - "tags": [] + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "sql" }, "outputs": [ { @@ -183,28 +229,275 @@ { "output_type": "display_data", "data": { - "text/html": "Total execution time: 00:00:00.089" + "text/html": "Total execution time: 00:00:00.016" }, "metadata": {} } ], - "execution_count": 12 + "execution_count": 19 }, { "cell_type": "code", "source": [ "-- append with 1 computed column in df and table, but table has 1 less col\r\n", - "USE connector_test_db1;\r\n", + "USE connector_test_db;\r\n", "\r\n", - "CREATE TABLE test4\r\n", + "CREATE TABLE test_computed_col_4\r\n", "(\r\n", - " Id bigint,\r\n", + " Id int not null,\r\n", " Year nvarchar(4),\r\n", " Years AS REPLICATE(Year, 2)\r\n", ")" ], "metadata": { "azdata_cell_guid": "e7b24025-05e0-40de-992f-0ca41bfcc246", + "tags": [], + "extensions": { + "azuredatastudio": { + "views": [] + } + }, + "language": "sql" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Total execution time: 00:00:00.013" + }, + "metadata": {} + } + ], + "execution_count": 20 + }, + { + "cell_type": "markdown", + "source": [ + "### **2\\. 
Temporal Tables (Generated Always)**" + ], + "metadata": { + "azdata_cell_guid": "d3d3ead0-0b97-43c5-988c-c186bc16f39c" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "USE connector_test_db;\r\n", + "CREATE TABLE dbo.tempTest\r\n", + "(\r\n", + " Id int NOT NULL PRIMARY KEY CLUSTERED\r\n", + " , Year nvarchar(4)\r\n", + " , Month nvarchar(2)\r\n", + " , [ValidFrom] datetime2 GENERATED ALWAYS AS ROW START\r\n", + " , [ValidTo] datetime2 GENERATED ALWAYS AS ROW END\r\n", + " , PERIOD FOR SYSTEM_TIME (ValidFrom, ValidTo)\r\n", + " )\r\n", + "WITH (SYSTEM_VERSIONING = ON (HISTORY_TABLE = dbo.tempTestHistory));" + ], + "metadata": { + "azdata_cell_guid": "881271ce-3847-4b82-93a5-6e5be1d577f3", + "language": "sql" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Total execution time: 00:00:00.021" + }, + "metadata": {} + } + ], + "execution_count": 22 + }, + { + "cell_type": "code", + "source": [ + "-- create table with computed cols and then convert it to temporal tables\r\n", + "USE connector_test_db;\r\n", + "CREATE TABLE test_computed_temp\r\n", + "(\r\n", + " Date As (Year + '-' + Month),\r\n", + " Id int PRIMARY KEY NOT NULL,\r\n", + " Year nvarchar(4),\r\n", + " Month nvarchar(2),\r\n", + " Years AS REPLICATE(Year, 2)\r\n", + ")\r\n", + "\r\n", + "-- Convert test_computed_temp to temporal tables\r\n", + "ALTER TABLE test_computed_temp\r\n", + " ADD \r\n", + " [ValidFrom] datetime2 GENERATED ALWAYS AS ROW START DEFAULT GETUTCDATE()\r\n", + " , [ValidTo] datetime2 GENERATED ALWAYS AS ROW END DEFAULT CONVERT(DATETIME2, '9999-12-31 23:59:59.9999999')\r\n", + " , PERIOD FOR SYSTEM_TIME (ValidFrom, ValidTo)\r\n", + "\r\n", + "ALTER TABLE test_computed_temp SET (SYSTEM_VERSIONING = ON (HISTORY_TABLE = dbo.test_computed_temp_history));" + ], + "metadata": { + "azdata_cell_guid": 
"e42ca950-3d69-4f91-9790-bec158e50f70", + "language": "sql" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Total execution time: 00:00:00.051" + }, + "metadata": {} + } + ], + "execution_count": 23 + }, + { + "cell_type": "code", + "source": [ + "-- Use below to drop temporal tables\r\n", + "/*\r\n", + "USE connector_test_db;\r\n", + "ALTER TABLE dbo.tempTest SET (SYSTEM_VERSIONING = OFF);\r\n", + "DROP TABLE dbo.tempTest;\r\n", + "DROP TABLE dbo.tempTestHistory;\r\n", + "*/\r\n", + "\r\n", + "/*\r\n", + "USE connector_test_db;\r\n", + "ALTER TABLE dbo.test_computed_temp SET (SYSTEM_VERSIONING = OFF);\r\n", + "DROP TABLE dbo.test_computed_temp;\r\n", + "DROP TABLE test_computed_temp_history;\r\n", + "*/" + ], + "metadata": { + "azdata_cell_guid": "261485c2-7076-49cb-aeb5-1e4048da1eb0", + "language": "sql" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Total execution time: 00:00:00.027" + }, + "metadata": {} + } + ], + "execution_count": 21 + }, + { + "cell_type": "markdown", + "source": [ + "### **3\\. 
Graph Table**" + ], + "metadata": { + "azdata_cell_guid": "02655951-5f8e-4420-973b-f5545f4658f6" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "USE connector_test_db\r\n", + "GO\r\n", + "\r\n", + "CREATE TABLE [dbo].[ForumPosts](\r\n", + " [PostID] [bigint] NULL,\r\n", + " [PostTitle] [varchar](100) NULL,\r\n", + " [PostBody] [varchar](1000) NULL\r\n", + ") AS NODE\r\n", + "GO\r\n", + "\r\n", + "CREATE TABLE [dbo].[Replies]\r\n", + "AS EDGE\r\n", + "GO" + ], + "metadata": { + "azdata_cell_guid": "07283020-b4c9-4a8d-af98-c1be3301a413", + "language": "sql" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Total execution time: 00:00:00.037" + }, + "metadata": {} + } + ], + "execution_count": 10 + }, + { + "cell_type": "markdown", + "source": [ + "### **4\\. 
Table with Identity column**" + ], + "metadata": { + "azdata_cell_guid": "8af5cf11-1bec-4049-a002-0678e0075afb" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "USE connector_test_db\r\n", + "\r\n", + "CREATE TABLE test_identity\r\n", + "(\r\n", + " Id int IDENTITY(1,1),\r\n", + " Year nvarchar(4),\r\n", + " Month nvarchar(2),\r\n", + " Years AS REPLICATE(Year, 2)\r\n", + ")" + ], + "metadata": { + "azdata_cell_guid": "66493ab5-2e0a-4249-b214-49f027008723", + "language": "sql", "tags": [] }, "outputs": [ @@ -218,12 +511,70 @@ { "output_type": "display_data", "data": { - "text/html": "Total execution time: 00:00:00.257" + "text/html": "Total execution time: 00:00:00.015" }, "metadata": {} } ], - "execution_count": 16 + "execution_count": 11 + }, + { + "cell_type": "markdown", + "source": [ + "### **5\\. Insert to table skipping some columns**" + ], + "metadata": { + "azdata_cell_guid": "6d24c74f-e28b-4bc1-9d86-2ba0bcb37f0b" + }, + "attachments": {} + }, + { + "cell_type": "code", + "source": [ + "USE connector_test_db\r\n", + "\r\n", + "CREATE TABLE test_skip_cols\r\n", + "(\r\n", + " Id int not null,\r\n", + " Year nvarchar(4),\r\n", + " Month nvarchar(2)\r\n", + ")" + ], + "metadata": { + "azdata_cell_guid": "7496e3c4-d5d0-46cc-b8c4-2cfaa40f01ae", + "language": "sql" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": "Commands completed successfully." + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/html": "Total execution time: 00:00:00.015" + }, + "metadata": {} + } + ], + "execution_count": 28 + }, + { + "cell_type": "code", + "source": [ + "DROP LOGIN connector_user1\r\n", + "DROP USER connector_user1\r\n", + "DROP DATABASE connector_test_db" + ], + "metadata": { + "language": "sql", + "azdata_cell_guid": "b3fbc40e-025c-4602-bcd5-41e19644ca5d" + }, + "outputs": [], + "execution_count": null } ] } \ No newline at end of file