diff --git a/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.MVC/speech/speech-to-text.md b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.MVC/speech/speech-to-text.md new file mode 100644 index 0000000000..4de77f98ca --- /dev/null +++ b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.MVC/speech/speech-to-text.md @@ -0,0 +1,42 @@ +--- +layout: post +title: Speech-to-Text With ##Platform_Name## AI AssistView Control | Syncfusion +description: Checkout and learn about configuration of Speech-to-Text with Azure OpenAI in ##Platform_Name## AI AssistView control of Syncfusion Essential JS 2 and more. +platform: ej2-asp-core-mvc +control: Azure Open AI +publishingplatform: ##Platform_Name## +documentation: ug +--- + +# Speech-to-Text in ASP.NET MVC AI AssistView + +The Syncfusion ASP.NET MVC AI AssistView control supports `Speech-to-Text` functionality through the browser's [Web Speech API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Speech_API), enabling conversion of spoken words into text using the device's microphone. + +## Prerequisites + +Before integrating `Speech-to-Text`, ensure the following: + +1. The Syncfusion AI AssistView control is properly set up in your ASP.NET MVC application. + - [ASP.NET MVC Getting Started Guide](../getting-started) + +2. The AI AssistView control is integrated with [Azure OpenAI](https://microsoft.github.io/PartnerResources/skilling/ai-ml-academy/resources/openai). + - [Integration of Azure OpenAI With ASP.NET MVC AI AssistView control](../ai-integrations/openai-integration) + +## Configure Speech-to-Text + +To enable Speech-to-Text functionality, modify the `Index.cshtml` file to incorporate the Web Speech API. The [SpeechToText](https://ej2.syncfusion.com/aspnetmvc/documentation/speech-to-text/getting-started) control listens for microphone input, transcribes spoken words, and updates the AI AssistView's editable footer with the transcribed text. 
The transcribed text is then sent as a prompt to the Azure OpenAI service via the AI AssistView control. + +{% tabs %} +{% highlight razor tabtitle="CSHTML" %} +{% include code-snippet/ai-assistview/speech/stt/razor %} +{% endhighlight %} +{% highlight c# tabtitle="SpeechToText.cs" %} +{% include code-snippet/ai-assistview/speech/stt/speechtotextmvc.cs %} +{% endhighlight %} +{% endtabs %} + +![Integrating Speech-to-Text with AI AssistView](images/aiassist-stt.png) + +## See Also + +* [Text-to-Speech](./text-to-speech) diff --git a/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.MVC/speech/text-to-speech.md b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.MVC/speech/text-to-speech.md new file mode 100644 index 0000000000..394cb71033 --- /dev/null +++ b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.MVC/speech/text-to-speech.md @@ -0,0 +1,42 @@ +--- +layout: post +title: Text-to-Speech With ##Platform_Name## AI AssistView Control | Syncfusion +description: Checkout and learn about configuration of Text-to-Speech with Azure OpenAI in ##Platform_Name## AI AssistView control of Syncfusion Essential JS 2 and more. +platform: ej2-asp-core-mvc +control: Azure Open AI +publishingplatform: ##Platform_Name## +documentation: ug +--- + +# Text-to-Speech in ASP.NET MVC AI AssistView + +The Syncfusion ASP.NET MVC AI AssistView control supports `Text-to-Speech` (TTS) functionality using the browser's Web Speech API, specifically the [SpeechSynthesisUtterance](https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance) interface, to convert AI-generated responses into spoken audio. + +## Prerequisites + +Before integrating `Text-to-Speech`, ensure the following: + +1. The Syncfusion AI AssistView control is properly set up in your ASP.NET MVC application. + - [ASP.NET MVC Getting Started Guide](../getting-started) + +2. The AI AssistView control is integrated with [Azure OpenAI](https://microsoft.github.io/PartnerResources/skilling/ai-ml-academy/resources/openai). 
+ - [Integration of Azure OpenAI With ASP.NET MVC AI AssistView control](../ai-integrations/openai-integration) + +## Configure Text-to-Speech + +To enable Text-to-Speech functionality, modify the `Index.cshtml` file to incorporate the Web Speech API. A custom `Read Aloud` button is added to the response toolbar using the [ResponseToolbarSettings](https://help.syncfusion.com/cr/aspnetmvc-js2/Syncfusion.EJ2.InteractiveChat.AIAssistViewResponseToolbarSettings.html) property. When clicked, the [ItemClicked](https://help.syncfusion.com/cr/aspnetmvc-js2/Syncfusion.EJ2.InteractiveChat.AIAssistViewResponseToolbarSettings.html#Syncfusion_EJ2_InteractiveChat_AIAssistViewResponseToolbarSettings_ItemClicked) event extracts plain text from the generated AI response and uses the browser SpeechSynthesis API to read it aloud. + +{% tabs %} +{% highlight razor tabtitle="CSHTML" %} +{% include code-snippet/ai-assistview/speech/tts/razor %} +{% endhighlight %} +{% highlight c# tabtitle="TextToSpeech.cs" %} +{% include code-snippet/ai-assistview/speech/tts/texttospeechmvc.cs %} +{% endhighlight %} +{% endtabs %} + +![Integrating Text-to-Speech with AI AssistView](images/aiassist-tts.png) + +## See Also + +* [Speech-to-Text](./speech-to-text) diff --git a/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.NETCORE/speech/speech-to-text.md b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.NETCORE/speech/speech-to-text.md new file mode 100644 index 0000000000..0cc91748f4 --- /dev/null +++ b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.NETCORE/speech/speech-to-text.md @@ -0,0 +1,42 @@ +--- +layout: post +title: Speech-to-Text With ##Platform_Name## AI AssistView Control | Syncfusion +description: Checkout and learn about configuration of Speech-to-Text with Azure OpenAI in ##Platform_Name## AI AssistView control of Syncfusion Essential JS 2 and more. 
+platform: ej2-asp-core-mvc +control: Azure Open AI +publishingplatform: ##Platform_Name## +documentation: ug +--- + +# Speech-to-Text in ASP.NET Core AI AssistView + +The Syncfusion ASP.NET Core AI AssistView control supports `Speech-to-Text` functionality through the browser's [Web Speech API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Speech_API), enabling conversion of spoken words into text using the device's microphone. + +## Prerequisites + +Before integrating `Speech-to-Text`, ensure the following: + +1. The Syncfusion AI AssistView control is properly set up in your ASP.NET Core application. + - [ASP.NET Core Getting Started Guide](../getting-started) + +2. The AI AssistView control is integrated with [Azure OpenAI](https://microsoft.github.io/PartnerResources/skilling/ai-ml-academy/resources/openai). + - [Integration of Azure OpenAI With ASP.NET Core AI AssistView control](../ai-integrations/openai-integration) + +## Configure Speech-to-Text + +To enable Speech-to-Text functionality, modify the `Index.cshtml` file to incorporate the Web Speech API. The [SpeechToText](https://ej2.syncfusion.com/aspnetcore/documentation/speech-to-text/getting-started) control listens for microphone input, transcribes spoken words, and updates the AI AssistView's editable footer with the transcribed text. The transcribed text is then sent as a prompt to the Azure OpenAI service via the AI AssistView control. 
+ +{% tabs %} +{% highlight razor tabtitle="CSHTML" %} +{% include code-snippet/ai-assistview/speech/stt/tagHelper %} +{% endhighlight %} +{% highlight c# tabtitle="SpeechToText.cs" %} +{% include code-snippet/ai-assistview/speech/stt/speechtotextcore.cs %} +{% endhighlight %} +{% endtabs %} + +![Integrating Speech-to-Text with AI AssistView](images/aiassist-stt.png) + +## See Also + +* [Text-to-Speech](./text-to-speech) diff --git a/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.NETCORE/speech/text-to-speech.md b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.NETCORE/speech/text-to-speech.md new file mode 100644 index 0000000000..a5d6cd4c8a --- /dev/null +++ b/ej2-asp-core-mvc/ai-assistview/EJ2_ASP.NETCORE/speech/text-to-speech.md @@ -0,0 +1,42 @@ +--- +layout: post +title: Text-to-Speech With ##Platform_Name## AI AssistView Control | Syncfusion +description: Checkout and learn about configuration of Text-to-Speech with Azure OpenAI in ##Platform_Name## AI AssistView control of Syncfusion Essential JS 2 and more. +platform: ej2-asp-core-mvc +control: Azure Open AI +publishingplatform: ##Platform_Name## +documentation: ug +--- + +# Text-to-Speech in ASP.NET Core AI AssistView + +The Syncfusion ASP.NET Core AI AssistView control supports `Text-to-Speech` (TTS) functionality using the browser's Web Speech API, specifically the [SpeechSynthesisUtterance](https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance) interface, to convert AI-generated responses into spoken audio. + +## Prerequisites + +Before integrating `Text-to-Speech`, ensure the following: + +1. The Syncfusion AI AssistView control is properly set up in your ASP.NET Core application. + - [ASP.NET Core Getting Started Guide](../getting-started) + +2. The AI AssistView control is integrated with [Azure OpenAI](https://microsoft.github.io/PartnerResources/skilling/ai-ml-academy/resources/openai). 
+ - [Integration of Azure OpenAI With ASP.NET Core AI AssistView control](../ai-integrations/openai-integration) + +## Configure Text-to-Speech + +To enable Text-to-Speech functionality, modify the `Index.cshtml` file to incorporate the Web Speech API. A custom `Read Aloud` button is added to the response toolbar using the `e-aiassistview-responsetoolbarsettings` tag helper. When clicked, the [itemClicked](https://help.syncfusion.com/cr/aspnetcore-js2/Syncfusion.EJ2.InteractiveChat.AIAssistViewResponseToolbarSettings.html#Syncfusion_EJ2_InteractiveChat_AIAssistViewResponseToolbarSettings_ItemClicked) event extracts plain text from the generated AI response and use the browser SpeechSynthesis API to read it aloud. + +{% tabs %} +{% highlight razor tabtitle="CSHTML" %} +{% include code-snippet/ai-assistview/speech/tts/tagHelper %} +{% endhighlight %} +{% highlight c# tabtitle="Gemini.cs" %} +{% include code-snippet/ai-assistview/speech/tts/texttospeechcore.cs %} +{% endhighlight %} +{% endtabs %} + +![Integrating Text-to-Speech with AI AssistView](images/aiassist-tts.png) + +## See Also + +* [Speech-to-Text](./speech-to-text) diff --git a/ej2-asp-core-mvc/ai-assistview/images/aiassist-stt.png b/ej2-asp-core-mvc/ai-assistview/images/aiassist-stt.png new file mode 100644 index 0000000000..6bc4fef311 Binary files /dev/null and b/ej2-asp-core-mvc/ai-assistview/images/aiassist-stt.png differ diff --git a/ej2-asp-core-mvc/ai-assistview/images/aiassist-tts.png b/ej2-asp-core-mvc/ai-assistview/images/aiassist-tts.png new file mode 100644 index 0000000000..77061962fb Binary files /dev/null and b/ej2-asp-core-mvc/ai-assistview/images/aiassist-tts.png differ diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/openaicore.cs b/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/openaicore.cs index 5bc65aba86..843b0dd7b6 100644 --- a/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/openaicore.cs +++ 
b/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/openaicore.cs @@ -1,6 +1,8 @@ -using OpenAI; using Azure; using Azure.AI.OpenAI; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.RazorPages; +using OpenAI.Chat; namespace WebApplication4.Pages { public class IndexModel : PageModel @@ -68,22 +70,22 @@ public async Task OnPostGetAIResponse([FromBody] PromptRequest re return BadRequest($"Error generating response: {ex.Message}"); } } - } + } - public class IndexViewModel - { - public List Items { get; set; } = new List(); - public string[] PromptSuggestionData { get; set; } - } + public class IndexViewModel + { + public List Items { get; set; } = new List(); + public string[] PromptSuggestionData { get; set; } + } - public class PromptRequest - { - public string Prompt { get; set; } - } + public class PromptRequest + { + public string Prompt { get; set; } + } - public class ToolbarItemModel - { - public string align { get; set; } - public string iconCss { get; set; } - } + public class ToolbarItemModel + { + public string align { get; set; } + public string iconCss { get; set; } } +} diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/tagHelper b/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/tagHelper index 103c1b2da7..a7872fe9c6 100644 --- a/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/tagHelper +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/ai-integrations/open-ai/tagHelper @@ -1,3 +1,4 @@ +@model IndexModel @using Syncfusion.EJ2.InteractiveChat @{ ViewData["Title"] = "AI Assistance with Gemini"; diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/razor b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/razor new file mode 100644 index 0000000000..55d99d58ea --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/razor @@ -0,0 +1,252 @@ +@using Syncfusion.EJ2.InteractiveChat + +
+ @Html.EJS().AIAssistView("aiAssistView").BannerTemplate("#bannerContent").FooterTemplate("#footerContent").StopRespondingClick("stopRespondingClick").PromptRequest("onPromptRequest").Created("onCreated").ToolbarSettings(new AIAssistViewToolbarSettings() + { + Items = ViewBag.Items, + ItemClicked = "toolbarItemClicked" + }).PromptToolbarSettings(new AIAssistViewPromptToolbarSettings() + { + ItemClicked = "promptToolbarItemClicked" + }).Render() +
+ + +@Html.AntiForgeryToken() + + + + + + + + + \ No newline at end of file diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/speechtotextcore.cs b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/speechtotextcore.cs new file mode 100644 index 0000000000..59d9422bbd --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/speechtotextcore.cs @@ -0,0 +1,84 @@ +using Azure; +using Azure.AI.OpenAI; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.RazorPages; +using OpenAI.Chat; + +namespace WebApplication.Pages +{ + public class IndexModel : PageModel + { + + public IndexViewModel ViewModel { get; set; } = new IndexViewModel(); + public void OnGet() + { + // Initialize toolbar items + ViewModel.Items = new List + { + new ToolbarItemModel + { + iconCss = "e-icons e-refresh", + align = "Right", + } + }; + } + + public async Task OnPostGetAIResponse([FromBody] PromptRequest request) + { + try + { + _logger.LogInformation("Received request with prompt: {Prompt}", request?.Prompt); + + if (string.IsNullOrEmpty(request?.Prompt)) + { + _logger.LogWarning("Prompt is null or empty."); + return BadRequest("Prompt cannot be empty."); + } + + string endpoint = "Your_Azure_OpenAI_Endpoint"; // Replace with your Azure OpenAI endpoint + string apiKey = "YOUR_AZURE_OPENAI_API_KEY"; // Replace with your Azure OpenAI API key + string deploymentName = "YOUR_DEPLOYMENT_NAME"; // Replace with your Azure OpenAI deployment name (e.g., gpt-4o-mini) + + var credential = new AzureKeyCredential(apiKey); + var client = new AzureOpenAIClient(new Uri(endpoint), credential); + var chatClient = client.GetChatClient(deploymentName); + + var chatCompletionOptions = new ChatCompletionOptions(); + var completion = await chatClient.CompleteChatAsync( + new[] { new UserChatMessage(request.Prompt) }, + chatCompletionOptions + ); + string responseText = completion.Value.Content[0].Text; + if (string.IsNullOrEmpty(responseText)) + { + 
_logger.LogError("Azure OpenAI API returned no text."); + return BadRequest("No response from Azure OpenAI."); + } + + _logger.LogInformation("Azure OpenAI response received: {Response}", responseText); + return new JsonResult(responseText); + } + catch (Exception ex) + { + _logger.LogError("Exception in Azure OpenAI call: {Message}", ex.Message); + return BadRequest($"Error generating response: {ex.Message}"); + } + } + } + + public class IndexViewModel + { + public List Items { get; set; } = new List(); + } + + public class PromptRequest + { + public string Prompt { get; set; } + } + + public class ToolbarItemModel + { + public string align { get; set; } + public string iconCss { get; set; } + } +} diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/speechtotextmvc.cs b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/speechtotextmvc.cs new file mode 100644 index 0000000000..d7372ee5ad --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/speechtotextmvc.cs @@ -0,0 +1,69 @@ +using OpenAI; +using OpenAI.Chat; +using Azure; +using Azure.AI.OpenAI; + +namespace AssistViewDemo.Controllers +{ + public class HomeController : Controller + { + + public List Items { get; set; } = new List(); + + public IActionResult Index() + { + Items.Add(new ToolbarItemModel { iconCss = "e-icons e-refresh", align = "Right" }); + ViewBag.Items = Items; + return View(); + } + public class ToolbarItemModel + { + public string iconCss { get; set; } + public string align { get; set; } + } + [HttpPost] + public async Task GetAIResponse([FromBody] PromptRequest request) + { + try + { + _logger.LogInformation("Received request with prompt: {Prompt}", request?.Prompt); + + if (string.IsNullOrEmpty(request?.Prompt)) + { + _logger.LogWarning("Prompt is null or empty."); + return BadRequest("Prompt cannot be empty."); + } + + // Azure OpenAI configuration + string endpoint = "Your_Azure_OpenAI_Endpoint"; // Replace with your Azure OpenAI endpoint + string 
apiKey = "YOUR_AZURE_OPENAI_API_KEY"; // Replace with your Azure OpenAI API key + string deploymentName = "YOUR_DEPLOYMENT_NAME"; // Replace with your Azure OpenAI deployment name (e.g., gpt-4o-mini) + + var credential = new AzureKeyCredential(apiKey); + var client = new AzureOpenAIClient(new Uri(endpoint), credential); + var chatClient = client.GetChatClient(deploymentName); + + var chatCompletionOptions = new ChatCompletionOptions(); + var completion = await chatClient.CompleteChatAsync( + new[] { new UserChatMessage(request.Prompt) }, + chatCompletionOptions + ); + + string responseText = completion.Value.Content[0].Text; + if (string.IsNullOrEmpty(responseText)) + { + _logger.LogError("Azure OpenAI API returned no text."); + return BadRequest("No response from Azure OpenAI."); + } + + _logger.LogInformation("Azure OpenAI response received: {Response}", responseText); + return Json(responseText); + } + catch (Exception ex) + { + _logger.LogError("Exception in Azure OpenAI call: {Message}", ex.Message); + return BadRequest($"Error generating response: {ex.Message}"); + } + } + } +} \ No newline at end of file diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/tagHelper b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/tagHelper new file mode 100644 index 0000000000..4640eb84bf --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/stt/tagHelper @@ -0,0 +1,249 @@ +@model IndexModel +@using Syncfusion.EJ2.InteractiveChat + +
+ + + + +
+ + + + + + + + \ No newline at end of file diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/razor b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/razor new file mode 100644 index 0000000000..dd184a6dc8 --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/razor @@ -0,0 +1,154 @@ +@using Syncfusion.EJ2.InteractiveChat + +
+ @Html.EJS().AIAssistView("aiAssistView").BannerTemplate("#bannerContent").StopRespondingClick("stopRespondingClick").PromptRequest("onPromptRequest").Created("onCreated").ToolbarSettings(new AIAssistViewToolbarSettings() + { + Items = ViewBag.Items, + ItemClicked = "toolbarItemClicked" + }).ResponseToolbarSettings(new AIAssistViewResponseToolbarSettings() + { + Items = ViewBag.ResponseItems, + ItemClicked = "onResponseToolbarItemClicked" + }).Render() +
+ + +@Html.AntiForgeryToken() + + + + + + + + \ No newline at end of file diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/tagHelper b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/tagHelper new file mode 100644 index 0000000000..fdfd26e68e --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/tagHelper @@ -0,0 +1,150 @@ +@model IndexModel +@using Syncfusion.EJ2.InteractiveChat + +
+ + + + +
+ + + + + + + \ No newline at end of file diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/texttospeechcore.cs b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/texttospeechcore.cs new file mode 100644 index 0000000000..2c1d75f7d9 --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/texttospeechcore.cs @@ -0,0 +1,89 @@ +using Azure; +using Azure.AI.OpenAI; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.RazorPages; +using OpenAI.Chat; + +namespace WebApplication.Pages +{ + public class IndexModel : PageModel + { + + public IndexViewModel ViewModel { get; set; } = new IndexViewModel(); + public void OnGet() + { + ViewModel.Items = new List + { + new ToolbarItemModel { iconCss = "e-icons e-refresh", align = "Right" } + }; + + ViewModel.ResponseItems = new List + { + new ToolbarItemModel { iconCss = "e-icons e-assist-copy", tooltip = "Copy" }, + new ToolbarItemModel { iconCss = "e-icons e-audio", tooltip = "Read Aloud" }, + new ToolbarItemModel { iconCss = "e-icons e-assist-like", tooltip = "Like" }, + new ToolbarItemModel { iconCss = "e-icons e-assist-dislike", tooltip = "Need Improvement" } + }; + } + + public async Task OnPostGetAIResponse([FromBody] PromptRequest request) + { + try + { + _logger.LogInformation("Received request with prompt: {Prompt}", request?.Prompt); + + if (string.IsNullOrEmpty(request?.Prompt)) + { + _logger.LogWarning("Prompt is null or empty."); + return BadRequest("Prompt cannot be empty."); + } + + string endpoint = "Your_Azure_OpenAI_Endpoint"; // Replace with your Azure OpenAI endpoint + string apiKey = "YOUR_AZURE_OPENAI_API_KEY"; // Replace with your Azure OpenAI API key + string deploymentName = "YOUR_DEPLOYMENT_NAME"; // Replace with your Azure OpenAI deployment name (e.g., gpt-4o-mini) + + var credential = new AzureKeyCredential(apiKey); + var client = new AzureOpenAIClient(new Uri(endpoint), credential); + var chatClient = client.GetChatClient(deploymentName); + + var 
chatCompletionOptions = new ChatCompletionOptions(); + var completion = await chatClient.CompleteChatAsync( + new[] { new UserChatMessage(request.Prompt) }, + chatCompletionOptions + ); + string responseText = completion.Value.Content[0].Text; + if (string.IsNullOrEmpty(responseText)) + { + _logger.LogError("Azure OpenAI API returned no text."); + return BadRequest("No response from Azure OpenAI."); + } + + _logger.LogInformation("Azure OpenAI response received: {Response}", responseText); + return new JsonResult(responseText); + } + catch (Exception ex) + { + _logger.LogError("Exception in Azure OpenAI call: {Message}", ex.Message); + return BadRequest($"Error generating response: {ex.Message}"); + } + } + } + + public class IndexViewModel + { + public List Items { get; set; } = new List(); + public List ResponseItems { get; set; } = new List(); + } + + public class PromptRequest + { + public string Prompt { get; set; } + } + + public class ToolbarItemModel + { + public string align { get; set; } + public string iconCss { get; set; } + public string tooltip { get; set; } + } +} diff --git a/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/texttospeechmvc.cs b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/texttospeechmvc.cs new file mode 100644 index 0000000000..fda7540088 --- /dev/null +++ b/ej2-asp-core-mvc/code-snippet/ai-assistview/speech/tts/texttospeechmvc.cs @@ -0,0 +1,79 @@ +using OpenAI; +using OpenAI.Chat; +using Azure; +using Azure.AI.OpenAI; + +namespace AssistViewDemo.Controllers +{ + public class HomeController : Controller + { + + public List Items { get; set; } = new List(); + public List ResponseItems { get; set; } = new List(); + + public IActionResult Index() + { + Items.Add(new ToolbarItemModel { iconCss = "e-icons e-refresh", align = "Right" }); + ResponseItems = new List + { + new ToolbarItemModel { iconCss = "e-icons e-assist-copy", tooltip = "Copy" }, + new ToolbarItemModel { iconCss = "e-icons e-audio", tooltip = "Read Aloud" 
}, + new ToolbarItemModel { iconCss = "e-icons e-assist-like", tooltip = "Like" }, + new ToolbarItemModel { iconCss = "e-icons e-assist-dislike", tooltip = "Need Improvement" } + }; + ViewBag.Items = Items; + ViewBag.ResponseItems = ResponseItems; + return View(); + } + public class ToolbarItemModel + { + public string iconCss { get; set; } + public string align { get; set; } + public string tooltip { get; set; } + } + [HttpPost] + public async Task GetAIResponse([FromBody] PromptRequest request) + { + try + { + _logger.LogInformation("Received request with prompt: {Prompt}", request?.Prompt); + + if (string.IsNullOrEmpty(request?.Prompt)) + { + _logger.LogWarning("Prompt is null or empty."); + return BadRequest("Prompt cannot be empty."); + } + + // Azure OpenAI configuration + string endpoint = "Your_Azure_OpenAI_Endpoint"; // Replace with your Azure OpenAI endpoint + string apiKey = "YOUR_AZURE_OPENAI_API_KEY"; // Replace with your Azure OpenAI API key + string deploymentName = "YOUR_DEPLOYMENT_NAME"; // Replace with your Azure OpenAI deployment name (e.g., gpt-4o-mini) + + var credential = new AzureKeyCredential(apiKey); + var client = new AzureOpenAIClient(new Uri(endpoint), credential); + var chatClient = client.GetChatClient(deploymentName); + + var chatCompletionOptions = new ChatCompletionOptions(); + var completion = await chatClient.CompleteChatAsync( + new[] { new UserChatMessage(request.Prompt) }, + chatCompletionOptions + ); + + string responseText = completion.Value.Content[0].Text; + if (string.IsNullOrEmpty(responseText)) + { + _logger.LogError("Azure OpenAI API returned no text."); + return BadRequest("No response from Azure OpenAI."); + } + + _logger.LogInformation("Azure OpenAI response received: {Response}", responseText); + return Json(responseText); + } + catch (Exception ex) + { + _logger.LogError("Exception in Azure OpenAI call: {Message}", ex.Message); + return BadRequest($"Error generating response: {ex.Message}"); + } + } + } +} \ No 
newline at end of file diff --git a/ej2-asp-core-toc.html b/ej2-asp-core-toc.html index bd87913c18..08708f5284 100644 --- a/ej2-asp-core-toc.html +++ b/ej2-asp-core-toc.html @@ -217,6 +217,16 @@
  • Custom views
  • File attachments
  • Templates
  • +
  • Speech + +
  • Appearance
  • Accessibility
  • Methods
  • diff --git a/ej2-asp-mvc-toc.html b/ej2-asp-mvc-toc.html index 8895e097af..03ee566818 100644 --- a/ej2-asp-mvc-toc.html +++ b/ej2-asp-mvc-toc.html @@ -171,6 +171,16 @@
  • Custom views
  • File attachments
  • Templates
  • +
  • Speech + +
  • Appearance
  • Accessibility
  • Methods