Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 15 additions & 1 deletion sdk/ai/ai-projects/review/ai-projects-node.api.md
Original file line number Diff line number Diff line change
Expand Up @@ -1841,7 +1841,7 @@ export interface MemoryStoresOperations {
list: (options?: MemoryStoresListMemoryStoresOptionalParams) => PagedAsyncIterableIterator<MemoryStore>;
searchMemories: (name: string, scope: string, options?: MemoryStoresSearchMemoriesOptionalParams) => Promise<MemoryStoreSearchResponse>;
update: (name: string, options?: MemoryStoresUpdateMemoryStoreOptionalParams) => Promise<MemoryStore>;
updateMemories: (name: string, scope: string, options?: MemoryStoresUpdateMemoriesOptionalParams) => PollerLike<OperationState_2<MemoryStoreUpdateResult>, MemoryStoreUpdateResult>;
updateMemories: (name: string, scope: string, options?: MemoryStoresUpdateMemoriesOptionalParams) => MemoryStoreUpdateMemoriesPoller;
}

// @public
Expand All @@ -1867,6 +1867,20 @@ export interface MemoryStoresUpdateMemoryStoreOptionalParams extends OperationOp
metadata?: Record<string, string>;
}

// @public
export type MemoryStoreUpdateMemoriesPoller = PollerLike<MemoryStoreUpdateOperationState, MemoryStoreUpdateResult> & {
readonly updateId?: string;
readonly updateStatus?: MemoryStoreUpdateStatus;
readonly supersededBy?: string;
};

// @public
export type MemoryStoreUpdateOperationState = OperationState_2<MemoryStoreUpdateResult> & {
updateId?: string;
updateStatus?: MemoryStoreUpdateStatus;
supersededBy?: string;
};

// @public
export interface MemoryStoreUpdateResponse {
error?: ApiError;
Expand Down
3 changes: 2 additions & 1 deletion sdk/ai/ai-projects/sample.env
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
AZURE_AI_PROJECT_ENDPOINT="<project connection string from Microsoft Foundry>"
AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME="<memory chat model deployment name>"
AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME="<memory embedding model deployment name>"
MODEL_DEPLOYMENT_NAME="<model deployment name>"
MODEL_API_KEY="<model api key>"
MODEL_ENDPOINT="<model endpoint url>"
Expand All @@ -22,4 +24,3 @@ IMAGE_GENERATION_MODEL_DEPLOYMENT_NAME="<image generation model deployment name>

MCP_PROJECT_CONNECTION_ID="<mcp project connection id>"
TRIPADVISOR_PROJECT_CONNECTION_ID="<tripadvisor project connection id>"

Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import * as readline from "readline";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const a2aProjectConnectionId =
process.env["A2A_PROJECT_CONNECTION_ID"] || "<a2a project connection id>";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import * as readline from "readline";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const aiSearchConnectionId =
process.env["AZURE_AI_SEARCH_CONNECTION_ID"] || "<ai search project connection id>";
const aiSearchIndexName = process.env["AI_SEARCH_INDEX_NAME"] || "<ai search index name>";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import * as readline from "readline";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const bingCustomSearchProjectConnectionId =
process.env["BING_CUSTOM_SEARCH_PROJECT_CONNECTION_ID"] ||
"<bing custom search project connection id>";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import { AIProjectClient } from "@azure/ai-projects";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const bingProjectConnectionId =
process.env["BING_PROJECT_CONNECTION_ID"] || "<bing project connection id>";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import { AIProjectClient } from "@azure/ai-projects";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const browserAutomationProjectConnectionId =
process.env["BROWSER_AUTOMATION_PROJECT_CONNECTION_ID"] ||
"<browser automation project connection id>";
Expand Down
174 changes: 174 additions & 0 deletions sdk/ai/ai-projects/samples-dev/agents/tools/agentMemorySearch.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

/**
* This sample demonstrates how to add conversational memory to an agent by using the
* Memory Search tool. The agent stores memories in a memory store and can recall them
* in later conversations.
*
* @summary Create an agent with Memory Search, capture memories from a conversation,
* and retrieve them in a new conversation.
*
* @azsdk-weight 100
*/

import { DefaultAzureCredential } from "@azure/identity";
import {
AIProjectClient,
MemoryStoreDefaultDefinition,
MemoryStoreDefaultOptions,
MemorySearchTool,
} from "@azure/ai-projects";
import "dotenv/config";

// Endpoint of the Microsoft Foundry project (see sample.env for the expected variables).
const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
// Model deployment the agent itself runs on.
const agentModelDeployment =
  process.env["MODEL_DEPLOYMENT_NAME"] || "<agent model deployment name>";
// Chat model used internally by the memory store to distill memories.
const chatModelDeployment =
  process.env["AZURE_AI_CHAT_MODEL_DEPLOYMENT_NAME"] || "<memory chat model deployment name>";
// Embedding model used by the memory store for semantic memory retrieval.
const embeddingModelDeployment =
  process.env["AZURE_AI_EMBEDDING_MODEL_DEPLOYMENT_NAME"] ||
  "<memory embedding model deployment name>";

// Fixed store name — the sample deletes and recreates this store on each run.
const memoryStoreName = "my_memory_store_123";
// Memory scope key; memories are partitioned per scope (here, a single user id).
const scope = "user_123";

/**
 * Pauses execution for the given number of milliseconds.
 *
 * @param ms - Duration of the pause, in milliseconds.
 * @returns A promise that resolves once the timeout elapses.
 */
function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}

/**
 * Narrows an unknown error to the "resource not found" case (HTTP 404).
 * The service SDK surfaces the HTTP status on a `statusCode` property —
 * NOTE(review): confirmed only by the original sample's `error?.statusCode` check.
 */
function isNotFoundError(error: unknown): boolean {
  return (
    typeof error === "object" &&
    error !== null &&
    (error as { statusCode?: number }).statusCode === 404
  );
}

/** Deletes the sample's memory store, tolerating the case where it does not exist. */
async function deleteMemoryStoreIfExists(
  project: AIProjectClient,
  successMessage: string,
): Promise<void> {
  try {
    await project.memoryStores.delete(memoryStoreName);
    console.log(successMessage);
  } catch (error: unknown) {
    // Only a missing store is expected; anything else is a real failure.
    if (!isNotFoundError(error)) {
      throw error;
    }
  }
}

/**
 * Runs one cleanup step, logging a warning instead of throwing so that a
 * failure in one step never prevents the remaining resources from being freed.
 */
async function tryCleanup(step: string, action: () => Promise<void>): Promise<void> {
  try {
    await action();
  } catch (error: unknown) {
    console.warn(`Cleanup step '${step}' failed:`, error);
  }
}

/**
 * Creates an agent wired to the Memory Search tool, seeds a memory in one
 * conversation, waits for the memory store to ingest it, then verifies the
 * agent can recall it in a brand-new conversation. All created resources
 * (conversations, agent version, memory store) are cleaned up at the end.
 */
export async function main(): Promise<void> {
  const project = new AIProjectClient(projectEndpoint, new DefaultAzureCredential());
  const openAIClient = await project.getOpenAIClient();

  // Track created resources so the finally block can clean up whatever exists.
  let conversationId: string | undefined;
  let followUpConversationId: string | undefined;
  let agentVersion: { name: string; version: string } | undefined;

  try {
    // Remove a leftover memory store from a previous run (a 404 is fine).
    await deleteMemoryStoreIfExists(project, `Memory store '${memoryStoreName}' deleted`);

    // Create a memory store backed by the configured chat and embedding models.
    const memoryStore = await project.memoryStores.create(
      memoryStoreName,
      {
        kind: "default",
        chat_model: chatModelDeployment,
        embedding_model: embeddingModelDeployment,
        options: {
          user_profile_enabled: true,
          chat_summary_enabled: true,
        } satisfies MemoryStoreDefaultOptions,
      } satisfies MemoryStoreDefaultDefinition,
      {
        description: "Memory store for agent conversations",
      },
    );
    // `chat_model` only exists on the default-definition variant of the union,
    // so narrow with an `in` check before reading it.
    const chatModelUsed =
      "chat_model" in memoryStore.definition
        ? memoryStore.definition.chat_model
        : chatModelDeployment;
    console.log(
      `Created memory store: ${memoryStore.name} (${memoryStore.id}) using chat model '${chatModelUsed}'`,
    );

    // Configure the Memory Search tool that will be attached to the agent.
    const memorySearchTool: MemorySearchTool = {
      type: "memory_search",
      memory_store_name: memoryStore.name,
      scope,
      update_delay: 1, // wait briefly after conversation inactivity before updating memories
    };

    // Create an agent version that uses the Memory Search tool.
    const agent = await project.agents.createVersion("MemorySearchAgent", {
      kind: "prompt",
      model: agentModelDeployment,
      instructions:
        "You are a helpful assistant that remembers user preferences using the memory search tool.",
      tools: [memorySearchTool],
    });
    agentVersion = {
      name: agent.name,
      version: agent.version,
    };
    console.log(`Agent created (id: ${agent.id}, name: ${agent.name}, version: ${agent.version})`);

    // Start a conversation and provide details the agent should remember.
    const conversation = await openAIClient.conversations.create();
    conversationId = conversation.id;
    console.log(`Conversation started (${conversation.id}). Sending a message to seed memories...`);

    const firstResponse = await openAIClient.responses.create(
      {
        input: "I prefer dark roast coffee and usually drink it in the morning.",
        conversation: conversation.id,
      },
      {
        body: { agent: { name: agent.name, type: "agent_reference" } },
      },
    );
    console.log(`Initial response: ${firstResponse.output_text}`);

    // Allow time for the memory store to update from this conversation.
    // NOTE(review): 60s is a heuristic wait, not a readiness signal — the
    // service offers no callback here, so the sample simply sleeps.
    console.log("Waiting for the memory store to capture the new memory...");
    await delay(60000);

    // Create a follow-up conversation and ask the agent to recall the stored memory.
    const followUpConversation = await openAIClient.conversations.create();
    followUpConversationId = followUpConversation.id;
    console.log(`Follow-up conversation started (${followUpConversation.id}).`);

    const followUpResponse = await openAIClient.responses.create(
      {
        input: "Can you remind me of my usual coffee order?",
        conversation: followUpConversation.id,
      },
      {
        body: { agent: { name: agent.name, type: "agent_reference" } },
      },
    );
    console.log(`Follow-up response: ${followUpResponse.output_text}`);
  } finally {
    // Each cleanup step is fault-isolated: a failure deleting one resource
    // must not leak the others (the original code aborted on the first throw).
    console.log("\nCleaning up resources...");
    if (conversationId) {
      const id = conversationId; // copy so the closure sees a narrowed string
      await tryCleanup("delete seed conversation", async () => {
        await openAIClient.conversations.delete(id);
        console.log(`Conversation ${id} deleted`);
      });
    }
    if (followUpConversationId) {
      const id = followUpConversationId;
      await tryCleanup("delete follow-up conversation", async () => {
        await openAIClient.conversations.delete(id);
        console.log(`Conversation ${id} deleted`);
      });
    }
    if (agentVersion) {
      const { name, version } = agentVersion;
      await tryCleanup("delete agent version", async () => {
        await project.agents.deleteVersion(name, version);
        console.log("Agent deleted");
      });
    }
    await tryCleanup("delete memory store", () =>
      deleteMemoryStoreIfExists(project, "Memory store deleted"),
    );
  }

  console.log("\nMemory Search agent sample completed!");
}

// Entry point: run the sample and surface any failure on stderr.
// `void` marks the ignored promise as intentionally fire-and-forget.
void main().catch((error: unknown) => {
  console.error("The sample encountered an error:", error);
});
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import * as path from "path";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const weatherSpecPath = path.resolve(__dirname, "../assets", "weather_openapi.json");

function loadOpenApiSpec(specPath: string): unknown {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import * as path from "path";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const tripAdvisorProjectConnectionId =
process.env["TRIPADVISOR_PROJECT_CONNECTION_ID"] || "<tripadvisor project connection id>";
const tripAdvisorSpecPath = path.resolve(__dirname, "../assets", "tripadvisor_openapi.json");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import { AIProjectClient } from "@azure/ai-projects";
import "dotenv/config";

const projectEndpoint = process.env["AZURE_AI_PROJECT_ENDPOINT"] || "<project endpoint>";
const deploymentName = process.env["AZURE_AI_MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const deploymentName = process.env["MODEL_DEPLOYMENT_NAME"] || "<model deployment name>";
const sharepointProjectConnectionId =
process.env["SHAREPOINT_PROJECT_CONNECTION_ID"] || "<sharepoint project connection id>";

Expand Down
Loading