From 33063c9f10ea9f8844dfe40f093b2ae4ffa81676 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Tue, 25 Nov 2025 06:04:02 +0000 Subject: [PATCH] Post release automated changes for azure-cosmos --- sdk/cosmosdb/cosmos/CHANGELOG.md | 10 + sdk/cosmosdb/cosmos/package.json | 2 +- .../samples/v4/javascript/Data/Families.json | 1 - .../v4/javascript/FabricAadScopeOverride.js | 96 --------- .../cosmos/samples/v4/javascript/README.md | 52 ++--- .../samples/v4/javascript/ThroughputBucket.js | 192 +++++++++--------- .../cosmos/samples/v4/javascript/package.json | 5 +- .../cosmos/samples/v4/typescript/README.md | 52 ++--- .../cosmos/samples/v4/typescript/package.json | 7 +- .../v4/typescript/src/ContainerManagement.ts | 4 +- .../v4/typescript/src/Data/Families.json | 1 - .../typescript/src/FabricAadScopeOverride.ts | 106 ---------- .../v4/typescript/src/ThroughputBucket.ts | 191 +++++++++-------- sdk/cosmosdb/cosmos/src/common/constants.ts | 2 +- 14 files changed, 279 insertions(+), 442 deletions(-) delete mode 100644 sdk/cosmosdb/cosmos/samples/v4/javascript/FabricAadScopeOverride.js delete mode 100644 sdk/cosmosdb/cosmos/samples/v4/typescript/src/FabricAadScopeOverride.ts diff --git a/sdk/cosmosdb/cosmos/CHANGELOG.md b/sdk/cosmosdb/cosmos/CHANGELOG.md index da685d734df6..01af186b6ba4 100644 --- a/sdk/cosmosdb/cosmos/CHANGELOG.md +++ b/sdk/cosmosdb/cosmos/CHANGELOG.md @@ -1,4 +1,14 @@ # Release History +## 4.9.1 (Unreleased) + +### Features Added + +### Breaking Changes + +### Bugs Fixed + +### Other Changes + ## 4.9.0 (2025-11-24) ### Features Added diff --git a/sdk/cosmosdb/cosmos/package.json b/sdk/cosmosdb/cosmos/package.json index f7b9054af235..141f6ac6a5b0 100644 --- a/sdk/cosmosdb/cosmos/package.json +++ b/sdk/cosmosdb/cosmos/package.json @@ -1,6 +1,6 @@ { "name": "@azure/cosmos", - "version": "4.9.0", + "version": "4.9.1", "description": "Microsoft Azure Cosmos DB Service Node.js SDK for NOSQL API", "sdk-type": "client", "keywords": [ diff --git a/sdk/cosmosdb/cosmos/samples/v4/javascript/Data/Families.json b/sdk/cosmosdb/cosmos/samples/v4/javascript/Data/Families.json index 73a534f6b771..91e74060cfa2 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/javascript/Data/Families.json +++ b/sdk/cosmosdb/cosmos/samples/v4/javascript/Data/Families.json @@ -185,4 +185,3 @@ } ] } - diff --git a/sdk/cosmosdb/cosmos/samples/v4/javascript/FabricAadScopeOverride.js b/sdk/cosmosdb/cosmos/samples/v4/javascript/FabricAadScopeOverride.js deleted file mode 100644 index 3004ca82f3ee..000000000000 --- a/sdk/cosmosdb/cosmos/samples/v4/javascript/FabricAadScopeOverride.js +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -/** - * @summary Demonstrates how to authenticate and use your database account using AAD credentials with Fabric. - * - * Prerequisites: - * 1. An Azure Cosmos account in fabric environment and database and container created. - * https://learn.microsoft.com/en-us/fabric/database/cosmos-db/overview - * 2. Node.js packages (@azure/cosmos + @azure/identity) and login: - * npm install @azure/cosmos @azure/identity - * az login - * - * Sample - demonstrates how to authenticate and use your database account using AAD credentials with Fabric. - * Read more about operations allowed for this authorization method: https://aka.ms/cosmos-native-rbac - * - * Note: - * This sample assumes the database and container already exist. - * It writes one item (PK path assumed to be "/pk") and reads it back. 
- */ - -require("dotenv/config"); -const { DefaultAzureCredential } = require("@azure/identity"); -const { CosmosClient } = require("@azure/cosmos"); -const { handleError, finish, logStep } = require("./Shared/handleError.js"); - -// Configuration - replace with your values -const endpoint = process.env.COSMOS_ENDPOINT || ""; -const databaseId = process.env.COSMOS_DATABASE || ""; -const containerId = process.env.COSMOS_CONTAINER || ""; - -function getTestItem(num) { - return { - id: `Item_${num}`, - pk: "partition1", - name: `Item ${num}`, - description: `This is item ${num}`, - runId: crypto.randomUUID(), - }; -} - -async function run() { - - logStep("Setting up AAD credentials"); - - // AAD auth works with az login - const credentials = new DefaultAzureCredential(); - - logStep("Creating Cosmos client with AAD credentials"); - const client = new CosmosClient({ - endpoint, - aadCredentials: credentials, - aadScope: "https://cosmos.azure.com/.default" - }); - - - // Do R/W data operations with your authorized AAD client - logStep("Getting database and container references"); - const database = client.database(databaseId); - const container = database.container(containerId); - - logStep("Creating a test item"); - // Create item - const testItem = getTestItem(0); - const { resource: createdItem } = await container.items.create(testItem); - console.log(`Created item: ${createdItem?.id}`); - - logStep("Reading the item back"); - // Read item - const { resource: readItem } = await container.item(testItem.id, testItem.pk).read(); - console.log("Point read:"); - console.log(JSON.stringify(readItem, null, 2)); - - logStep("Querying for items in the partition"); - // Query items - const querySpec = { - query: "SELECT * FROM c WHERE c.pk = @partitionKey", - parameters: [ - { - name: "@partitionKey", - value: testItem.pk, - }, - ], - }; - - const { resources: items } = await container.items.query(querySpec).fetchAll(); - console.log(`Found ${items.length} items in partition '${testItem.pk}':`); - items.forEach((item) => { - console.log(`- ${item.id}: ${item.name}`); - }); - - logStep("Sample completed successfully"); - await finish(); -} - -run().catch(handleError); diff --git a/sdk/cosmosdb/cosmos/samples/v4/javascript/README.md b/sdk/cosmosdb/cosmos/samples/v4/javascript/README.md index 59a27213efcd..04ff20131969 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/javascript/README.md +++ b/sdk/cosmosdb/cosmos/samples/v4/javascript/README.md @@ -11,29 +11,31 @@ urlFragment: cosmos-javascript These sample programs show how to use the JavaScript client libraries for Azure Cosmos DB in some common scenarios. -| **File Name** | **Description** | -| --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | -| [AlterQueryThroughput.js][alterquerythroughput] | Updates a container offer to change query throughput. | -| [Bulk.js][bulk] | Shows a simple bulk call with each BulkOperation type. | -| [BulkUpdateWithSproc.js][bulkupdatewithsproc] | Bulk Updates documents with a Stored Procedure. Prefer `container.items().bulk()` to this behavior. | -| [ChangeFeed.js][changefeed] | Demonstrates using a ChangeFeed. 
| -| [ChangeFeedIterator/ChangeFeedHierarchicalPartitionKey.js][changefeediterator_changefeedhierarchicalpartitionkey] | Demonstrates using a ChangeFeed for a partition key | -| [ChangeFeedIterator/ChangeFeedIteratorAllVersionsAndDeletes.js][changefeediterator_changefeediteratorallversionsanddeletes] | Demonstrates using a ChangeFeed in AllVersionsAndDeletes mode for entire container, a partition key, and an epk range | -| [ChangeFeedIterator/ChangeFeedIteratorLatestVersion.js][changefeediterator_changefeediteratorlatestversion] | Demonstrates using a ChangeFeed in LatestVersion mode for entire container, a partition key, and an epk range | -| [ClientSideEncryption.js][clientsideencryption] | Client-Side Encryption | -| [ContainerManagement.js][containermanagement] | Demonstrates container create, read, delete and reading all containers belonging to a database. | -| [DatabaseManagement.js][databasemanagement] | Demonstrates database create, read, delete and reading all databases. | -| [Diagnostics.js][diagnostics] | Demonstrates usage of CosmosDiagnostic Object. | -| [EntraAuth.js][entraauth] | Uses Entra Auth credentials to authenticate with the CosmosClient. | -| [ExcludedLocations.js][excludedlocations] | Demonstrates Cosmos DB operations with excluded regions/locations. | -| [ExecuteBulkOperations.js][executebulkoperations] | Shows CRUD operations using executeBulkOperations API. | -| [HierarchicalPartitioning.js][hierarchicalpartitioning] | Shows various operations on containers with Hierarchical Partitioning. | -| [IndexManagement.js][indexmanagement] | Shows various ways to manage indexing items or changing container index policies. | -| [ItemManagement.js][itemmanagement] | Demonstrates item creation, read, delete and reading all items belonging to a container. | -| [QueryThroughput.js][querythroughput] | Demonstrates query throughput scenarios. | -| [Query/FullTextSearch.js][query_fulltextsearch] | Demonstrates full text search queries. | -| [SasTokenAuth.js][sastokenauth] | Demonstrates using SasTokens for granting scoped access to Cosmos resources. _Private feature_ | -| [ServerSideScripts.js][serversidescripts] | Demonstrates using stored procedures for server side run functions | +| **File Name** | **Description** | +| --------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | +| [AlterQueryThroughput.js][alterquerythroughput] | Updates a container offer to change query throughput. | +| [Bulk.js][bulk] | Shows a simple bulk call with each BulkOperation type. | +| [BulkUpdateWithSproc.js][bulkupdatewithsproc] | Bulk Updates documents with a Stored Procedure. Prefer `container.items().bulk()` to this behavior. | +| [ChangeFeed.js][changefeed] | Demonstrates using a ChangeFeed. 
| +| [ChangeFeedIterator/ChangeFeedHierarchicalPartitionKey.js][changefeediterator_changefeedhierarchicalpartitionkey] | Demonstrates using a ChangeFeed for a partition key | +| [ChangeFeedIterator/ChangeFeedIteratorAllVersionsAndDeletes.js][changefeediterator_changefeediteratorallversionsanddeletes] | Demonstrates using a ChangeFeed in AllVersionsAndDeletes mode for entire container, a partition key, and an epk range | +| [ChangeFeedIterator/ChangeFeedIteratorLatestVersion.js][changefeediterator_changefeediteratorlatestversion] | Demonstrates using a ChangeFeed in LatestVersion mode for entire container, a partition key, and an epk range | +| [ClientSideEncryption.js][clientsideencryption] | Client-Side Encryption | +| [ContainerManagement.js][containermanagement] | Demonstrates container create, read, delete and reading all containers belonging to a database. | +| [DatabaseManagement.js][databasemanagement] | Demonstrates database create, read, delete and reading all databases. | +| [Diagnostics.js][diagnostics] | Demonstrates usage of CosmosDiagnostic Object. | +| [EntraAuth.js][entraauth] | Uses Entra Auth credentials to authenticate with the CosmosClient. | +| [ExcludedLocations.js][excludedlocations] | Demonstrates Cosmos DB operations with excluded regions/locations. | +| [ExecuteBulkOperations.js][executebulkoperations] | Shows CRUD operations using executeBulkOperations API. | +| [HierarchicalPartitioning.js][hierarchicalpartitioning] | Shows various operations on containers with Hierarchical Partitioning. | +| [IndexManagement.js][indexmanagement] | Shows various ways to manage indexing items or changing container index policies. | +| [ItemManagement.js][itemmanagement] | Demonstrates item creation, read, delete and reading all items belonging to a container. | +| [PriorityLevel.js][prioritylevel] | Demonstrates using Priority Level support with various Cosmos DB operations Shows how Low/High priority levels affect request throttling behavior | +| [Query/FullTextSearch.js][query_fulltextsearch] | Demonstrates full text search queries. | +| [QueryThroughput.js][querythroughput] | Demonstrates query throughput scenarios. | +| [SasTokenAuth.js][sastokenauth] | Demonstrates using SasTokens for granting scoped access to Cosmos resources. _Private feature_ | +| [ServerSideScripts.js][serversidescripts] | Demonstrates using stored procedures for server side run functions | +| [ThroughputBucket.js][throughputbucket] | Demonstrates throughput bucket operations at the client, database, container and item levels. 
                                                                                   |

 ## Prerequisites

@@ -92,10 +94,12 @@ Take a look at our [API Documentation][apiref] for more information about the AP
 [hierarchicalpartitioning]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/HierarchicalPartitioning.js
 [indexmanagement]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/IndexManagement.js
 [itemmanagement]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/ItemManagement.js
-[querythroughput]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/QueryThroughput.js
+[prioritylevel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/PriorityLevel.js
 [query_fulltextsearch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/Query/FullTextSearch.js
+[querythroughput]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/QueryThroughput.js
 [sastokenauth]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/SasTokenAuth.js
 [serversidescripts]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/ServerSideScripts.js
+[throughputbucket]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/javascript/ThroughputBucket.js
 [apiref]: https://learn.microsoft.com/javascript/api/@azure/cosmos
 [freesub]: https://azure.microsoft.com/free/
 [createinstance_azurecosmosdbaccount]: https://learn.microsoft.com/azure/cosmos-db/how-to-manage-database-account#create-an-account
diff --git a/sdk/cosmosdb/cosmos/samples/v4/javascript/ThroughputBucket.js b/sdk/cosmosdb/cosmos/samples/v4/javascript/ThroughputBucket.js
index 5bdc1aba8f96..158536765b26 100644
--- a/sdk/cosmosdb/cosmos/samples/v4/javascript/ThroughputBucket.js
+++ b/sdk/cosmosdb/cosmos/samples/v4/javascript/ThroughputBucket.js
@@ -3,10 +3,10 @@
 /**
  * @summary Demonstrates throughput bucket operations at the client, database, container and item levels.
-*/ + */ require("dotenv/config"); const { ChangeFeedStartFrom, CosmosClient } = require("@azure/cosmos"); -const { logSampleHeader, logStep, finish, handleError } = require("./Shared/handleError.js"); +const { logSampleHeader, handleError, logStep } = require("./Shared/handleError.js"); const { randomUUID } = require("@azure/core-util"); const endpoint = process.env.COSMOS_ENDPOINT || ""; @@ -14,7 +14,6 @@ const key = process.env.COSMOS_KEY || ""; const databaseId = process.env.COSMOS_DATABASE || "ThroughputBucketSampleDB"; const containerId = process.env.COSMOS_CONTAINER || "ThroughputBucketSampleContainer"; -// Global variables to be used across functions let globalClient; let globalContainer; @@ -23,28 +22,28 @@ logSampleHeader("Throughput Bucket Operations"); // Applies throughput bucket 1 to all requests from a client application async function createClientWithThroughputBucket() { logStep("Creating client with throughput bucket 1 for all operations"); - + globalClient = new CosmosClient({ endpoint, key, - throughputBucket: 1 + throughputBucket: 1, }); - + logStep("Client created with throughput bucket 1"); } // Creates database and container for the sample async function setupDatabaseAndContainer() { logStep("Setting up database and container for throughput bucket operations"); - + // Create database const { database } = await globalClient.databases.createIfNotExists({ id: databaseId }); logStep(`Created/found database: ${database.id}`); - + // Create container - const { container } = await database.containers.createIfNotExists({ + const { container } = await database.containers.createIfNotExists({ id: containerId, - partitionKey: { paths: ["/pk"] } + partitionKey: { paths: ["/pk"] }, }); logStep(`Created/found container: ${container.id}`); globalContainer = container; @@ -53,58 +52,66 @@ async function setupDatabaseAndContainer() { // Demonstrates all CRUD operations with different throughput buckets async function demonstrateCRUDWithThroughputBuckets() { logStep("Demonstrating CRUD operations with different throughput buckets"); - + // CREATE - Create item with throughput bucket 2 const createItemId = `create-item-${randomUUID()}`; - const { resource: createdItem } = await globalContainer.items.create({ - id: createItemId, - pk: "crud-pk", - name: "CRUD Test Document", - description: "Created with throughput bucket 2", - operation: "create" - }, { - throughputBucket: 2 - }); - + const { resource: createdItem } = await globalContainer.items.create( + { + id: createItemId, + pk: "crud-pk", + name: "CRUD Test Document", + description: "Created with throughput bucket 2", + operation: "create", + }, + { + throughputBucket: 2, + }, + ); + logStep(`Created item with id: ${createdItem?.id} using throughput bucket 2`); - + // READ - Read item with throughput bucket 3 const { resource: readItem } = await globalContainer.item(createItemId, "crud-pk").read({ - throughputBucket: 3 + throughputBucket: 3, }); - + logStep(`Read item with id: ${readItem.id} using throughput bucket 3`); - + // UPDATE (Replace) - Update item with throughput bucket 4 readItem.description = "Updated with throughput bucket 4"; readItem.operation = "update"; readItem.updatedAt = new Date().toISOString(); - - const { resource: updatedItem } = await globalContainer.item(createItemId, "crud-pk").replace(readItem, { - throughputBucket: 4 - }); - + + const { resource: updatedItem } = await globalContainer + .item(createItemId, "crud-pk") + .replace(readItem, { + throughputBucket: 4, + }); + logStep(`Updated item with id: 
${updatedItem.id} using throughput bucket 4`); - + // UPSERT - Upsert items with throughput bucket 5 for (let i = 1; i <= 2; i++) { const upsertItemId = `upsert-item-${i}`; - const { resource: upsertedItem } = await globalContainer.items.upsert({ - id: upsertItemId, - pk: "crud-pk", - name: `Upserted Document ${i}`, - description: `Upserted with bucket 2 - iteration ${i}`, - operation: "upsert" - }, { - throughputBucket: 2 - }); + const { resource: upsertedItem } = await globalContainer.items.upsert( + { + id: upsertItemId, + pk: "crud-pk", + name: `Upserted Document ${i}`, + description: `Upserted with bucket 2 - iteration ${i}`, + operation: "upsert", + }, + { + throughputBucket: 2, + }, + ); logStep(`Upserted item with id: ${upsertedItem?.id} using throughput bucket 2`); } - + // DELETE - Delete item with throughput bucket 6 await globalContainer.item(createItemId, "crud-pk").delete({ - throughputBucket: 4 + throughputBucket: 4, }); logStep(`Deleted item with id: ${createItemId} using throughput bucket 4`); @@ -115,74 +122,79 @@ async function demonstrateCRUDWithThroughputBuckets() { parameters: [ { name: "@operation", - value: "upsert" - } - ] + value: "upsert", + }, + ], }; - - const { resources: queryResults } = await globalContainer.items.query(querySpec, { - throughputBucket: 3 - }).fetchAll(); - + + const { resources: queryResults } = await globalContainer.items + .query(querySpec, { + throughputBucket: 3, + }) + .fetchAll(); + logStep(`Queried ${queryResults.length} items using throughput bucket 3`); - + // Clean up remaining items for (const item of queryResults) { await globalContainer.item(item.id, item.pk).delete(); } - + logStep("Cleaned up remaining CRUD test items"); } // Demonstrates client-level vs operation-level throughput buckets async function demonstrateClientVsOperationBuckets() { logStep("Demonstrating client-level vs operation-level throughput buckets"); - + // Create client with throughput bucket 1 for all operations const bucketClient = new CosmosClient({ endpoint, key, - throughputBucket: 1 + throughputBucket: 1, }); - + const database = bucketClient.database(databaseId); const clientContainer = database.container(globalContainer.id); - + // Operation 1: Uses client's default bucket (1) const itemId1 = `client-bucket-${randomUUID()}`; await clientContainer.items.create({ id: itemId1, pk: "client-test", name: "Uses Client Bucket", - description: "This operation uses the client's default throughput bucket 1" + description: "This operation uses the client's default throughput bucket 1", }); - + logStep("Created item using client's default throughput bucket 1"); - + // Operation 2: Overrides client bucket with operation-specific bucket (2) const itemId2 = `operation-bucket-${randomUUID()}`; - await clientContainer.items.create({ - id: itemId2, - pk: "client-test", - name: "Uses Operation Bucket", - description: "This operation overrides client bucket with throughput bucket 2" - }, { - throughputBucket: 2 - }); - + await clientContainer.items.create( + { + id: itemId2, + pk: "client-test", + name: "Uses Operation Bucket", + description: "This operation overrides client bucket with throughput bucket 2", + }, + { + throughputBucket: 2, + }, + ); + logStep("Created item using operation-specific throughput bucket 2 (overriding client bucket 1)"); - + // Clean up await clientContainer.item(itemId1, "client-test").delete(); await clientContainer.item(itemId2, "client-test").delete(); - + logStep("Cleaned up client vs operation bucket test items"); } // Demonstrates bulk 
operations with throughput buckets async function demonstrateBulkOperationsWithThroughputBucket() { logStep("Demonstrating bulk operations with throughput bucket 2"); - + // Prepare bulk operations const operations = []; for (let i = 1; i <= 5; i++) { @@ -192,14 +204,14 @@ async function demonstrateBulkOperationsWithThroughputBucket() { id: `bulk-item-${i}`, pk: "bulk-pk", name: `Bulk Document ${i}`, - description: `Created via bulk operation with bucket 2` - } + description: `Created via bulk operation with bucket 2`, + }, }); } - + // Execute bulk operations with throughput bucket 2 const res = await globalContainer.items.executeBulkOperations(operations, { - throughputBucket: 2 + throughputBucket: 2, }); logStep(`Executed ${res.length} bulk operations using throughput bucket 2`); @@ -208,7 +220,7 @@ async function demonstrateBulkOperationsWithThroughputBucket() { // Demonstrates change feed operations with throughput buckets async function demonstrateChangeFeedWithThroughputBucket() { logStep("Demonstrating change feed operations with throughput bucket 2"); - + // Create some test data for change feed const testItems = []; for (let i = 1; i <= 3; i++) { @@ -218,19 +230,19 @@ async function demonstrateChangeFeedWithThroughputBucket() { pk: "changefeed-pk", name: `Change Feed Document ${i}`, description: `Document for change feed demo`, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); testItems.push(itemId); } - + logStep("Created test data for change feed"); - + // Read change feed with throughput bucket 4 const changeFeedIterator = globalContainer.items.getChangeFeedIterator({ changeFeedStartFrom: ChangeFeedStartFrom.Beginning("changefeed-pk"), - throughputBucket: 4 + throughputBucket: 4, }); - + let changeCount = 0; if (changeFeedIterator.hasMoreResults) { const res = await changeFeedIterator.readNext(); @@ -243,14 +255,14 @@ async function demonstrateChangeFeedWithThroughputBucket() { for (const itemId of testItems) { await globalContainer.item(itemId, "changefeed-pk").delete(); } - + logStep("Cleaned up change feed test items"); } // Cleanup function to delete database and container async function cleanup() { logStep("Cleaning up resources"); - + try { await globalClient.database(databaseId).delete(); logStep(`Cleaned up database: ${databaseId}`); @@ -263,10 +275,10 @@ async function runSample() { try { // Initialize client await createClientWithThroughputBucket(); - + // Setup database and container await setupDatabaseAndContainer(); - + // 1. Demonstrate all CRUD operations with different throughput buckets await demonstrateCRUDWithThroughputBuckets(); @@ -278,9 +290,8 @@ async function runSample() { // 4. 
Demonstrate change feed operations with throughput buckets await demonstrateChangeFeedWithThroughputBucket(); - + logStep("Throughput bucket sample completed successfully!"); - } catch (error) { await handleError(error); } finally { @@ -292,8 +303,7 @@ async function runSample() { } // Run the sample -runSample() - .catch((error) => { - console.error("Sample failed:", error); - process.exit(1); +runSample().catch((error) => { + console.error("Sample failed:", error); + process.exit(1); }); diff --git a/sdk/cosmosdb/cosmos/samples/v4/javascript/package.json b/sdk/cosmosdb/cosmos/samples/v4/javascript/package.json index ee9270feb7c4..e179cd51e89d 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/javascript/package.json +++ b/sdk/cosmosdb/cosmos/samples/v4/javascript/package.json @@ -32,6 +32,9 @@ "@azure/cosmos": "latest", "dotenv": "latest", "@azure/core-util": "^1.11.0", - "@azure/identity": "^4.8.0" + "@azure/identity": "^4.13.0" + }, + "devDependencies": { + "cross-env": "latest" } } diff --git a/sdk/cosmosdb/cosmos/samples/v4/typescript/README.md b/sdk/cosmosdb/cosmos/samples/v4/typescript/README.md index 26f8afc65fd6..cefccc4a1379 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/typescript/README.md +++ b/sdk/cosmosdb/cosmos/samples/v4/typescript/README.md @@ -11,29 +11,31 @@ urlFragment: cosmos-typescript These sample programs show how to use the TypeScript client libraries for Azure Cosmos DB in some common scenarios. -| **File Name** | **Description** | -| --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | -| [AlterQueryThroughput.ts][alterquerythroughput] | Updates a container offer to change query throughput. | -| [Bulk.ts][bulk] | Shows a simple bulk call with each BulkOperation type. | -| [BulkUpdateWithSproc.ts][bulkupdatewithsproc] | Bulk Updates documents with a Stored Procedure. Prefer `container.items().bulk()` to this behavior. | -| [ChangeFeed.ts][changefeed] | Demonstrates using a ChangeFeed. | -| [ChangeFeedIterator/ChangeFeedHierarchicalPartitionKey.ts][changefeediterator_changefeedhierarchicalpartitionkey] | Demonstrates using a ChangeFeed for a partition key | -| [ChangeFeedIterator/ChangeFeedIteratorAllVersionsAndDeletes.ts][changefeediterator_changefeediteratorallversionsanddeletes] | Demonstrates using a ChangeFeed in AllVersionsAndDeletes mode for entire container, a partition key, and an epk range | -| [ChangeFeedIterator/ChangeFeedIteratorLatestVersion.ts][changefeediterator_changefeediteratorlatestversion] | Demonstrates using a ChangeFeed in LatestVersion mode for entire container, a partition key, and an epk range | -| [ClientSideEncryption.ts][clientsideencryption] | Client-Side Encryption | -| [ContainerManagement.ts][containermanagement] | Demonstrates container create, read, delete and reading all containers belonging to a database. | -| [DatabaseManagement.ts][databasemanagement] | Demonstrates database create, read, delete and reading all databases. | -| [Diagnostics.ts][diagnostics] | Demonstrates usage of CosmosDiagnostic Object. | -| [EntraAuth.ts][entraauth] | Uses Entra Auth credentials to authenticate with the CosmosClient. | -| [ExcludedLocations.ts][excludedlocations] | Demonstrates Cosmos DB operations with excluded regions/locations. | -| [ExecuteBulkOperations.ts][executebulkoperations] | Shows CRUD operations using executeBulkOperations API. 
| -| [HierarchicalPartitioning.ts][hierarchicalpartitioning] | Shows various operations on containers with Hierarchical Partitioning. | -| [IndexManagement.ts][indexmanagement] | Shows various ways to manage indexing items or changing container index policies. | -| [ItemManagement.ts][itemmanagement] | Demonstrates item creation, read, delete and reading all items belonging to a container. | -| [QueryThroughput.ts][querythroughput] | Demonstrates query throughput scenarios. | -| [Query/FullTextSearch.ts][query_fulltextsearch] | Demonstrates full text search queries. | -| [SasTokenAuth.ts][sastokenauth] | Demonstrates using SasTokens for granting scoped access to Cosmos resources. _Private feature_ | -| [ServerSideScripts.ts][serversidescripts] | Demonstrates using stored procedures for server side run functions | +| **File Name** | **Description** | +| --------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | +| [AlterQueryThroughput.ts][alterquerythroughput] | Updates a container offer to change query throughput. | +| [Bulk.ts][bulk] | Shows a simple bulk call with each BulkOperation type. | +| [BulkUpdateWithSproc.ts][bulkupdatewithsproc] | Bulk Updates documents with a Stored Procedure. Prefer `container.items().bulk()` to this behavior. | +| [ChangeFeed.ts][changefeed] | Demonstrates using a ChangeFeed. | +| [ChangeFeedIterator/ChangeFeedHierarchicalPartitionKey.ts][changefeediterator_changefeedhierarchicalpartitionkey] | Demonstrates using a ChangeFeed for a partition key | +| [ChangeFeedIterator/ChangeFeedIteratorAllVersionsAndDeletes.ts][changefeediterator_changefeediteratorallversionsanddeletes] | Demonstrates using a ChangeFeed in AllVersionsAndDeletes mode for entire container, a partition key, and an epk range | +| [ChangeFeedIterator/ChangeFeedIteratorLatestVersion.ts][changefeediterator_changefeediteratorlatestversion] | Demonstrates using a ChangeFeed in LatestVersion mode for entire container, a partition key, and an epk range | +| [ClientSideEncryption.ts][clientsideencryption] | Client-Side Encryption | +| [ContainerManagement.ts][containermanagement] | Demonstrates container create, read, delete and reading all containers belonging to a database. | +| [DatabaseManagement.ts][databasemanagement] | Demonstrates database create, read, delete and reading all databases. | +| [Diagnostics.ts][diagnostics] | Demonstrates usage of CosmosDiagnostic Object. | +| [EntraAuth.ts][entraauth] | Uses Entra Auth credentials to authenticate with the CosmosClient. | +| [ExcludedLocations.ts][excludedlocations] | Demonstrates Cosmos DB operations with excluded regions/locations. | +| [ExecuteBulkOperations.ts][executebulkoperations] | Shows CRUD operations using executeBulkOperations API. | +| [HierarchicalPartitioning.ts][hierarchicalpartitioning] | Shows various operations on containers with Hierarchical Partitioning. | +| [IndexManagement.ts][indexmanagement] | Shows various ways to manage indexing items or changing container index policies. | +| [ItemManagement.ts][itemmanagement] | Demonstrates item creation, read, delete and reading all items belonging to a container. 
| +| [PriorityLevel.ts][prioritylevel] | Demonstrates using Priority Level support with various Cosmos DB operations Shows how Low/High priority levels affect request throttling behavior | +| [Query/FullTextSearch.ts][query_fulltextsearch] | Demonstrates full text search queries. | +| [QueryThroughput.ts][querythroughput] | Demonstrates query throughput scenarios. | +| [SasTokenAuth.ts][sastokenauth] | Demonstrates using SasTokens for granting scoped access to Cosmos resources. _Private feature_ | +| [ServerSideScripts.ts][serversidescripts] | Demonstrates using stored procedures for server side run functions | +| [ThroughputBucket.ts][throughputbucket] | Demonstrates throughput bucket operations at the client, database, container and item levels. | ## Prerequisites @@ -104,10 +106,12 @@ Take a look at our [API Documentation][apiref] for more information about the AP [hierarchicalpartitioning]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/HierarchicalPartitioning.ts [indexmanagement]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/IndexManagement.ts [itemmanagement]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ItemManagement.ts -[querythroughput]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/QueryThroughput.ts +[prioritylevel]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/PriorityLevel.ts [query_fulltextsearch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/Query/FullTextSearch.ts +[querythroughput]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/QueryThroughput.ts [sastokenauth]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/SasTokenAuth.ts [serversidescripts]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ServerSideScripts.ts +[throughputbucket]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ThroughputBucket.ts [apiref]: https://learn.microsoft.com/javascript/api/@azure/cosmos [freesub]: https://azure.microsoft.com/free/ [createinstance_azurecosmosdbaccount]: https://learn.microsoft.com/azure/cosmos-db/how-to-manage-database-account#create-an-account diff --git a/sdk/cosmosdb/cosmos/samples/v4/typescript/package.json b/sdk/cosmosdb/cosmos/samples/v4/typescript/package.json index b1579e1a3c3e..d90390b50b1e 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/typescript/package.json +++ b/sdk/cosmosdb/cosmos/samples/v4/typescript/package.json @@ -36,11 +36,12 @@ "@azure/cosmos": "latest", "dotenv": "latest", "@azure/core-util": "^1.11.0", - "@azure/identity": "^4.8.0" + "@azure/identity": "^4.13.0" }, "devDependencies": { "@types/node": "^20.0.0", - "typescript": "~5.8.2", - "rimraf": "latest" + "cross-env": "latest", + "rimraf": "latest", + "typescript": "~5.9.3" } } diff --git a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ContainerManagement.ts b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ContainerManagement.ts index 0a2b07861167..52495b97ad68 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ContainerManagement.ts +++ b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ContainerManagement.ts @@ -237,9 +237,7 @@ async function run(): Promise { // Query to verify items after 
deletion
   const queryToVerify = "SELECT c.id, c.name, c.state FROM c WHERE c.state = 'WA'";
-  const { resources: waItems } = await containerForDeletion.items
-    .query(queryToVerify)
-    .fetchAll();
+  const { resources: waItems } = await containerForDeletion.items.query(queryToVerify).fetchAll();
   console.log(`Items in WA after deletion: ${waItems.length}`);
 
   // Clean up the container
diff --git a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/Data/Families.json b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/Data/Families.json
index 73a534f6b771..91e74060cfa2 100644
--- a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/Data/Families.json
+++ b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/Data/Families.json
@@ -185,4 +185,3 @@
     }
   ]
 }
-
diff --git a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/FabricAadScopeOverride.ts b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/FabricAadScopeOverride.ts
deleted file mode 100644
index 529ff0947c73..000000000000
--- a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/FabricAadScopeOverride.ts
+++ /dev/null
@@ -1,106 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT License.
-
-/**
- * @summary Demonstrates how to authenticate and use your database account using AAD credentials with Fabric.
- *
- * Prerequisites:
- * 1. An Azure Cosmos account in fabric environment and database and container created.
- *    https://learn.microsoft.com/en-us/fabric/database/cosmos-db/overview
- * 2. Node.js packages (@azure/cosmos + @azure/identity) and login:
- *      npm install @azure/cosmos @azure/identity
- *      az login
- *
- * Sample - demonstrates how to authenticate and use your database account using AAD credentials with Fabric.
- * Read more about operations allowed for this authorization method: https://aka.ms/cosmos-native-rbac
- *
- * Note:
- *   This sample assumes the database and container already exist.
- *   It writes one item (PK path assumed to be "/pk") and reads it back.
- */ - -import "dotenv/config"; -import { DefaultAzureCredential } from "@azure/identity"; -// eslint-disable-next-line @typescript-eslint/ban-ts-comment -// @ts-ignore -import { CosmosClient } from "@azure/cosmos"; -import { handleError, finish, logStep } from "./Shared/handleError.js"; - -// Configuration - replace with your values -const endpoint = process.env.COSMOS_ENDPOINT || ""; -const databaseId = process.env.COSMOS_DATABASE || ""; -const containerId = process.env.COSMOS_CONTAINER || ""; - -// Test item structure -interface TestItem { - id: string; - pk: string; - name: string; - description: string; - runId: string; -} - -function getTestItem(num: number): TestItem { - return { - id: `Item_${num}`, - pk: "partition1", - name: `Item ${num}`, - description: `This is item ${num}`, - runId: crypto.randomUUID(), - }; -} - -async function run(): Promise { - - logStep("Setting up AAD credentials"); - - // AAD auth works with az login - const credentials = new DefaultAzureCredential(); - - logStep("Creating Cosmos client with AAD credentials"); - - const client = new CosmosClient({ - endpoint, - aadCredentials: credentials, - aadScope: "https://cosmos.azure.com/.default" - }); - - logStep("Getting database and container references"); - const database = client.database(databaseId); - const container = database.container(containerId); - - logStep("Creating a test item"); - // Create item - const testItem = getTestItem(0); - const { resource: createdItem } = await container.items.create(testItem); - console.log(`Created item: ${createdItem?.id}`); - - logStep("Reading the item back"); - // Read item - const { resource: readItem } = await container.item(testItem.id, testItem.pk).read(); - console.log("Point read:"); - console.log(JSON.stringify(readItem, null, 2)); - - logStep("Querying for items in the partition"); - // Query items - const querySpec = { - query: "SELECT * FROM c WHERE c.pk = @partitionKey", - parameters: [ - { - name: "@partitionKey", - value: testItem.pk, - }, - ], - }; - - const { resources: items } = await container.items.query(querySpec).fetchAll(); - console.log(`Found ${items.length} items in partition '${testItem.pk}':`); - items.forEach((item) => { - console.log(`- ${item.id}: ${item.name}`); - }); - - logStep("Sample completed successfully"); - await finish(); -} - -run().catch(handleError); diff --git a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ThroughputBucket.ts b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ThroughputBucket.ts index 519c24a41378..d6c30dedd7b2 100644 --- a/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ThroughputBucket.ts +++ b/sdk/cosmosdb/cosmos/samples/v4/typescript/src/ThroughputBucket.ts @@ -3,7 +3,7 @@ /** * @summary Demonstrates throughput bucket operations at the client, database, container and item levels. 
-*/ + */ import "dotenv/config"; import { ChangeFeedStartFrom, CosmosClient, type Container } from "@azure/cosmos"; import { logSampleHeader, handleError, logStep } from "./Shared/handleError.js"; @@ -22,28 +22,28 @@ logSampleHeader("Throughput Bucket Operations"); // Applies throughput bucket 1 to all requests from a client application async function createClientWithThroughputBucket(): Promise { logStep("Creating client with throughput bucket 1 for all operations"); - + globalClient = new CosmosClient({ endpoint, key, - throughputBucket: 1 + throughputBucket: 1, }); - + logStep("Client created with throughput bucket 1"); } // Creates database and container for the sample async function setupDatabaseAndContainer(): Promise { logStep("Setting up database and container for throughput bucket operations"); - + // Create database const { database } = await globalClient.databases.createIfNotExists({ id: databaseId }); logStep(`Created/found database: ${database.id}`); - + // Create container - const { container } = await database.containers.createIfNotExists({ + const { container } = await database.containers.createIfNotExists({ id: containerId, - partitionKey: { paths: ["/pk"] } + partitionKey: { paths: ["/pk"] }, }); logStep(`Created/found container: ${container.id}`); globalContainer = container; @@ -52,58 +52,66 @@ async function setupDatabaseAndContainer(): Promise { // Demonstrates all CRUD operations with different throughput buckets async function demonstrateCRUDWithThroughputBuckets(): Promise { logStep("Demonstrating CRUD operations with different throughput buckets"); - + // CREATE - Create item with throughput bucket 2 const createItemId = `create-item-${randomUUID()}`; - const { resource: createdItem } = await globalContainer.items.create({ - id: createItemId, - pk: "crud-pk", - name: "CRUD Test Document", - description: "Created with throughput bucket 2", - operation: "create" - }, { - throughputBucket: 2 - }); - + const { resource: createdItem } = await globalContainer.items.create( + { + id: createItemId, + pk: "crud-pk", + name: "CRUD Test Document", + description: "Created with throughput bucket 2", + operation: "create", + }, + { + throughputBucket: 2, + }, + ); + logStep(`Created item with id: ${createdItem?.id} using throughput bucket 2`); - + // READ - Read item with throughput bucket 3 const { resource: readItem } = await globalContainer.item(createItemId, "crud-pk").read({ - throughputBucket: 3 + throughputBucket: 3, }); - + logStep(`Read item with id: ${readItem.id} using throughput bucket 3`); - + // UPDATE (Replace) - Update item with throughput bucket 4 readItem.description = "Updated with throughput bucket 4"; readItem.operation = "update"; readItem.updatedAt = new Date().toISOString(); - - const { resource: updatedItem } = await globalContainer.item(createItemId, "crud-pk").replace(readItem, { - throughputBucket: 4 - }); - + + const { resource: updatedItem } = await globalContainer + .item(createItemId, "crud-pk") + .replace(readItem, { + throughputBucket: 4, + }); + logStep(`Updated item with id: ${updatedItem.id} using throughput bucket 4`); - + // UPSERT - Upsert items with throughput bucket 5 for (let i = 1; i <= 2; i++) { const upsertItemId = `upsert-item-${i}`; - const { resource: upsertedItem } = await globalContainer.items.upsert({ - id: upsertItemId, - pk: "crud-pk", - name: `Upserted Document ${i}`, - description: `Upserted with bucket 2 - iteration ${i}`, - operation: "upsert" - }, { - throughputBucket: 2 - }); + const { resource: upsertedItem } = await 
globalContainer.items.upsert( + { + id: upsertItemId, + pk: "crud-pk", + name: `Upserted Document ${i}`, + description: `Upserted with bucket 2 - iteration ${i}`, + operation: "upsert", + }, + { + throughputBucket: 2, + }, + ); logStep(`Upserted item with id: ${upsertedItem?.id} using throughput bucket 2`); } - + // DELETE - Delete item with throughput bucket 6 await globalContainer.item(createItemId, "crud-pk").delete({ - throughputBucket: 4 + throughputBucket: 4, }); logStep(`Deleted item with id: ${createItemId} using throughput bucket 4`); @@ -114,74 +122,79 @@ async function demonstrateCRUDWithThroughputBuckets(): Promise { parameters: [ { name: "@operation", - value: "upsert" - } - ] + value: "upsert", + }, + ], }; - - const { resources: queryResults } = await globalContainer.items.query(querySpec, { - throughputBucket: 3 - }).fetchAll(); - + + const { resources: queryResults } = await globalContainer.items + .query(querySpec, { + throughputBucket: 3, + }) + .fetchAll(); + logStep(`Queried ${queryResults.length} items using throughput bucket 3`); - + // Clean up remaining items for (const item of queryResults) { await globalContainer.item(item.id, item.pk).delete(); } - + logStep("Cleaned up remaining CRUD test items"); } // Demonstrates client-level vs operation-level throughput buckets async function demonstrateClientVsOperationBuckets(): Promise { logStep("Demonstrating client-level vs operation-level throughput buckets"); - + // Create client with throughput bucket 1 for all operations const bucketClient = new CosmosClient({ endpoint, key, - throughputBucket: 1 + throughputBucket: 1, }); - + const database = bucketClient.database(databaseId); const clientContainer = database.container(globalContainer.id); - + // Operation 1: Uses client's default bucket (1) const itemId1 = `client-bucket-${randomUUID()}`; await clientContainer.items.create({ id: itemId1, pk: "client-test", name: "Uses Client Bucket", - description: "This operation uses the client's default throughput bucket 1" + description: "This operation uses the client's default throughput bucket 1", }); - + logStep("Created item using client's default throughput bucket 1"); - + // Operation 2: Overrides client bucket with operation-specific bucket (2) const itemId2 = `operation-bucket-${randomUUID()}`; - await clientContainer.items.create({ - id: itemId2, - pk: "client-test", - name: "Uses Operation Bucket", - description: "This operation overrides client bucket with throughput bucket 2" - }, { - throughputBucket: 2 - }); - + await clientContainer.items.create( + { + id: itemId2, + pk: "client-test", + name: "Uses Operation Bucket", + description: "This operation overrides client bucket with throughput bucket 2", + }, + { + throughputBucket: 2, + }, + ); + logStep("Created item using operation-specific throughput bucket 2 (overriding client bucket 1)"); - + // Clean up await clientContainer.item(itemId1, "client-test").delete(); await clientContainer.item(itemId2, "client-test").delete(); - + logStep("Cleaned up client vs operation bucket test items"); } // Demonstrates bulk operations with throughput buckets async function demonstrateBulkOperationsWithThroughputBucket(): Promise { logStep("Demonstrating bulk operations with throughput bucket 2"); - + // Prepare bulk operations const operations = []; for (let i = 1; i <= 5; i++) { @@ -191,14 +204,14 @@ async function demonstrateBulkOperationsWithThroughputBucket(): Promise { id: `bulk-item-${i}`, pk: "bulk-pk", name: `Bulk Document ${i}`, - description: `Created via bulk 
operation with bucket 2` - } + description: `Created via bulk operation with bucket 2`, + }, }); } - + // Execute bulk operations with throughput bucket 2 const res = await globalContainer.items.executeBulkOperations(operations, { - throughputBucket: 2 + throughputBucket: 2, }); logStep(`Executed ${res.length} bulk operations using throughput bucket 2`); @@ -207,7 +220,7 @@ async function demonstrateBulkOperationsWithThroughputBucket(): Promise { // Demonstrates change feed operations with throughput buckets async function demonstrateChangeFeedWithThroughputBucket(): Promise { logStep("Demonstrating change feed operations with throughput bucket 2"); - + // Create some test data for change feed const testItems = []; for (let i = 1; i <= 3; i++) { @@ -217,19 +230,19 @@ async function demonstrateChangeFeedWithThroughputBucket(): Promise { pk: "changefeed-pk", name: `Change Feed Document ${i}`, description: `Document for change feed demo`, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); testItems.push(itemId); } - + logStep("Created test data for change feed"); - + // Read change feed with throughput bucket 4 const changeFeedIterator = globalContainer.items.getChangeFeedIterator({ changeFeedStartFrom: ChangeFeedStartFrom.Beginning("changefeed-pk"), - throughputBucket: 4 + throughputBucket: 4, }); - + let changeCount = 0; if (changeFeedIterator.hasMoreResults) { const res = await changeFeedIterator.readNext(); @@ -242,14 +255,14 @@ async function demonstrateChangeFeedWithThroughputBucket(): Promise { for (const itemId of testItems) { await globalContainer.item(itemId, "changefeed-pk").delete(); } - + logStep("Cleaned up change feed test items"); } // Cleanup function to delete database and container async function cleanup(): Promise { logStep("Cleaning up resources"); - + try { await globalClient.database(databaseId).delete(); logStep(`Cleaned up database: ${databaseId}`); @@ -262,10 +275,10 @@ async function runSample(): Promise { try { // Initialize client await createClientWithThroughputBucket(); - + // Setup database and container await setupDatabaseAndContainer(); - + // 1. Demonstrate all CRUD operations with different throughput buckets await demonstrateCRUDWithThroughputBuckets(); @@ -277,9 +290,8 @@ async function runSample(): Promise { // 4. Demonstrate change feed operations with throughput buckets await demonstrateChangeFeedWithThroughputBucket(); - + logStep("Throughput bucket sample completed successfully!"); - } catch (error: any) { await handleError(error); } finally { @@ -291,8 +303,7 @@ async function runSample(): Promise { } // Run the sample -runSample() - .catch((error) => { - console.error("Sample failed:", error); - process.exit(1); - }); +runSample().catch((error) => { + console.error("Sample failed:", error); + process.exit(1); +}); diff --git a/sdk/cosmosdb/cosmos/src/common/constants.ts b/sdk/cosmosdb/cosmos/src/common/constants.ts index 1c35fef46e1c..ac49fa14d72b 100644 --- a/sdk/cosmosdb/cosmos/src/common/constants.ts +++ b/sdk/cosmosdb/cosmos/src/common/constants.ts @@ -224,7 +224,7 @@ export const Constants = { AzureNamespace: "Azure.Cosmos", AzurePackageName: "@azure/cosmos", SDKName: "azure-cosmos-js", - SDKVersion: "4.9.0", + SDKVersion: "4.9.1", // Diagnostics CosmosDbDiagnosticLevelEnvVarName: "AZURE_COSMOSDB_DIAGNOSTICS_LEVEL",