From c25c72ed30b3ad747c4edc6bdf3706d61ce8a94c Mon Sep 17 00:00:00 2001 From: Durgesh Suryawanshi Date: Wed, 17 Apr 2024 12:59:15 +0530 Subject: [PATCH 1/6] Added ga3-test-app --- callautomation-ga3-test-app/main.py | 282 +++++++++++++++++++ callautomation-ga3-test-app/readme.md | 61 ++++ callautomation-ga3-test-app/requirements.txt | 4 + 3 files changed, 347 insertions(+) create mode 100644 callautomation-ga3-test-app/main.py create mode 100644 callautomation-ga3-test-app/readme.md create mode 100644 callautomation-ga3-test-app/requirements.txt diff --git a/callautomation-ga3-test-app/main.py b/callautomation-ga3-test-app/main.py new file mode 100644 index 0000000..2d6c37a --- /dev/null +++ b/callautomation-ga3-test-app/main.py @@ -0,0 +1,282 @@ + +from pyexpat import model +import uuid +from urllib.parse import urlencode, urljoin +from azure.eventgrid import EventGridEvent, SystemEventNames +import requests +from flask import Flask, Response, request, json +from logging import INFO +import re +from azure.communication.callautomation import ( + CallAutomationClient, + PhoneNumberIdentifier, + RecognizeInputType, + TextSource + ) +from azure.core.messaging import CloudEvent +import openai + +from openai.api_resources import ( + ChatCompletion +) + +# Your ACS resource connection string +ACS_CONNECTION_STRING = "" + +# Cognitive service endpoint +COGNITIVE_SERVICE_ENDPOINT="" + +# Cognitive service endpoint +AZURE_OPENAI_SERVICE_KEY = "" + +# Open AI service endpoint +AZURE_OPENAI_SERVICE_ENDPOINT="" + +# Azure Open AI Deployment Model Name +AZURE_OPENAI_DEPLOYMENT_MODEL_NAME="" + +# Azure Open AI Deployment Model +AZURE_OPENAI_DEPLOYMENT_MODEL="gpt-3.5-turbo" + +# Agent Phone Number +AGENT_PHONE_NUMBER="" + +# Callback events URI to handle callback events. +CALLBACK_URI_HOST = "" +CALLBACK_EVENTS_URI = CALLBACK_URI_HOST + "/api/callbacks" + +ANSWER_PROMPT_SYSTEM_TEMPLATE = """ + You are an assistant designed to answer the customer query and analyze the sentiment score from the customer tone. + You also need to determine the intent of the customer query and classify it into categories such as sales, marketing, shopping, etc. + Use a scale of 1-10 (10 being highest) to rate the sentiment score. + Use the below format, replacing the text in brackets with the result. Do not include the brackets in the output: + Content:[Answer the customer query briefly and clearly in two lines and ask if there is anything else you can help with] + Score:[Sentiment score of the customer tone] + Intent:[Determine the intent of the customer query] + Category:[Classify the intent into one of the categories] + """ + +HELLO_PROMPT = "Hello, thank you for calling! How can I help you today?" +TIMEOUT_SILENCE_PROMPT = "I am sorry, I did not hear anything. If you need assistance, please let me know how I can help you," +GOODBYE_PROMPT = "Thank you for calling! I hope I was able to assist you. Have a great day!" +CONNECT_AGENT_PROMPT = "I'm sorry, I was not able to assist you with your request. Let me transfer you to an agent who can help you further. Please hold the line, and I willl connect you shortly." +CALLTRANSFER_FAILURE_PROMPT = "It looks like I can not connect you to an agent right now, but we will get the next available agent to call you back as soon as possible." +AGENT_PHONE_NUMBER_EMPTY_PROMPT = "I am sorry, we are currently experiencing high call volumes and all of our agents are currently busy. Our next available agent will call you back as soon as possible." 
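+# Illustrative only: a reply in the shape the system prompt above asks for, and which
+# CHAT_RESPONSE_EXTRACT_PATTERN below is written to parse. The actual wording comes from the model at runtime.
+#   Content: You can pay your bill online or over the phone. Is there anything else I can help you with?
+#   Score: 7
+#   Intent: Bill payment
+#   Category: sales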
+END_CALL_PHRASE_TO_CONNECT_AGENT = "Sure, please stay on the line. I am going to transfer you to an agent." + +TRANSFER_FAILED_CONTEXT = "TransferFailed" +CONNECT_AGENT_CONTEXT = "ConnectAgent" +GOODBYE_CONTEXT = "Goodbye" + +CHAT_RESPONSE_EXTRACT_PATTERN = r"\s*Content:(.*)\s*Score:(.*\d+)\s*Intent:(.*)\s*Category:(.*)" + +call_automation_client = CallAutomationClient.from_connection_string(ACS_CONNECTION_STRING) + +recording_id = None +recording_chunks_location = [] +max_retry = 2 + +openai.api_key = AZURE_OPENAI_SERVICE_KEY +openai.api_base = AZURE_OPENAI_SERVICE_ENDPOINT # your endpoint should look like the following https://YOUR_RESOURCE_NAME.openai.azure.com/ +openai.api_type = 'azure' +openai.api_version = '2023-05-15' # this may change in the future + +app = Flask(__name__) + +def get_chat_completions_async(system_prompt,user_prompt): + openai.api_key = AZURE_OPENAI_SERVICE_KEY + openai.api_base = AZURE_OPENAI_SERVICE_ENDPOINT # your endpoint should look like the following https://YOUR_RESOURCE_NAME.openai.azure.com/ + openai.api_type = 'azure' + openai.api_version = '2023-05-15' # this may change in the future + + # Define your chat completions request + chat_request = [ + {"role": "system", "content": f"{system_prompt}"}, + {"role": "user", "content": f"In less than 200 characters: respond to this question: {user_prompt}?"} + ] + + global response_content + try: + response = ChatCompletion.create(model=AZURE_OPENAI_DEPLOYMENT_MODEL,deployment_id=AZURE_OPENAI_DEPLOYMENT_MODEL_NAME, messages=chat_request,max_tokens = 1000) + except ex: + app.logger.info("error in openai api call : %s",ex) + + # Extract the response content + if response is not None : + response_content = response['choices'][0]['message']['content'] + else : + response_content="" + return response_content + +def get_chat_gpt_response(speech_input): + return get_chat_completions_async(ANSWER_PROMPT_SYSTEM_TEMPLATE,speech_input) + +def handle_recognize(replyText,callerId,call_connection_id,context=""): + play_source = TextSource(text=replyText, voice_name="en-US-NancyNeural") + recognize_result=call_automation_client.get_call_connection(call_connection_id).start_recognizing_media( + input_type=RecognizeInputType.SPEECH, + target_participant=PhoneNumberIdentifier(callerId), + end_silence_timeout=10, + play_prompt=play_source, + operation_context=context) + app.logger.info("handle_recognize : data=%s",recognize_result) + +def handle_play(call_connection_id, text_to_play, context): + play_source = TextSource(text=text_to_play, voice_name= "en-US-NancyNeural") + call_automation_client.get_call_connection(call_connection_id).play_media_to_all(play_source, + operation_context=context) + +def handle_hangup(call_connection_id): + call_automation_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True) + +def detect_escalate_to_agent_intent(speech_text, logger): + return has_intent_async(user_query=speech_text, intent_description="talk to agent", logger=logger) + +def has_intent_async(user_query, intent_description, logger): + is_match=False + system_prompt = "You are a helpful assistant" + combined_prompt = f"In 1 word: does {user_query} have a similar meaning as {intent_description}?" 
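+    # Lightweight intent probe: the model is asked a one-word yes/no question and the reply is
+    # simply string-matched for "yes"; any reply without "yes" is treated as no matching intent.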
+ #combined_prompt = base_user_prompt.format(user_query, intent_description) + response = get_chat_completions_async(system_prompt, combined_prompt) + if "yes" in response.lower(): + is_match =True + logger.info(f"OpenAI results: is_match={is_match}, customer_query='{user_query}', intent_description='{intent_description}'") + return is_match + +def get_sentiment_score(sentiment_score): + pattern = r"(\d)+" + regex = re.compile(pattern) + match = regex.search(sentiment_score) + return int(match.group()) if match else -1 + +@app.route("/api/incomingCall", methods=['POST']) +def incoming_call_handler(): + for event_dict in request.json: + event = EventGridEvent.from_dict(event_dict) + app.logger.info("incoming event data --> %s", event.data) + if event.event_type == SystemEventNames.EventGridSubscriptionValidationEventName: + app.logger.info("Validating subscription") + validation_code = event.data['validationCode'] + validation_response = {'validationResponse': validation_code} + return Response(response=json.dumps(validation_response), status=200) + elif event.event_type =="Microsoft.Communication.IncomingCall": + app.logger.info("Incoming call received: data=%s", + event.data) + if event.data['from']['kind'] =="phoneNumber": + caller_id = event.data['from']["phoneNumber"]["value"] + else : + caller_id = event.data['from']['rawId'] + app.logger.info("incoming call handler caller id: %s", + caller_id) + incoming_call_context=event.data['incomingCallContext'] + guid =uuid.uuid4() + query_parameters = urlencode({"callerId": caller_id}) + callback_uri = f"{CALLBACK_EVENTS_URI}/{guid}?{query_parameters}" + + app.logger.info("callback url: %s", callback_uri) + + answer_call_result = call_automation_client.answer_call(incoming_call_context=incoming_call_context, + cognitive_services_endpoint=COGNITIVE_SERVICE_ENDPOINT, + callback_url=callback_uri) + app.logger.info("Answered call for connection id: %s", + answer_call_result.call_connection_id) + return Response(status=200) + +@app.route("/api/callbacks/", methods=["POST"]) +def handle_callback(contextId): + try: + global caller_id , call_connection_id + app.logger.info("Request Json: %s", request.json) + for event_dict in request.json: + event = CloudEvent.from_dict(event_dict) + call_connection_id = event.data['callConnectionId'] + + app.logger.info("%s event received for call connection id: %s", event.type, call_connection_id) + caller_id = request.args.get("callerId").strip() + if "+" not in caller_id: + caller_id="+".strip()+caller_id.strip() + + app.logger.info("call connected : data=%s", event.data) + if event.type == "Microsoft.Communication.CallConnected": + handle_recognize(HELLO_PROMPT, + caller_id,call_connection_id, + context="GetFreeFormText") + + elif event.type == "Microsoft.Communication.RecognizeCompleted": + if event.data['recognitionType'] == "speech": + speech_text = event.data['speechResult']['speech']; + app.logger.info("Recognition completed, speech_text =%s", + speech_text); + if speech_text is not None and len(speech_text) > 0: + if detect_escalate_to_agent_intent(speech_text=speech_text,logger=app.logger): + handle_play(call_connection_id=call_connection_id,text_to_play=END_CALL_PHRASE_TO_CONNECT_AGENT,context=CONNECT_AGENT_CONTEXT) + else: + chat_gpt_response= get_chat_gpt_response(speech_text) + app.logger.info(f"Chat GPT response:{chat_gpt_response}") + regex = re.compile(CHAT_RESPONSE_EXTRACT_PATTERN) + match = regex.search(chat_gpt_response) + if match: + answer = match.group(1) + sentiment_score = 
match.group(2).strip() + intent = match.group(3) + category = match.group(4) + app.logger.info(f"Chat GPT Answer={answer}, Sentiment Rating={sentiment_score}, Intent={intent}, Category={category}") + score=get_sentiment_score(sentiment_score) + app.logger.info(f"Score={score}") + if -1 < score < 5: + app.logger.info(f"Score is less than 5") + handle_play(call_connection_id=call_connection_id,text_to_play=CONNECT_AGENT_PROMPT,context=CONNECT_AGENT_CONTEXT) + else: + app.logger.info(f"Score is more than 5") + handle_recognize(answer,caller_id,call_connection_id,context="OpenAISample") + else: + app.logger.info("No match found") + handle_recognize(chat_gpt_response,caller_id,call_connection_id,context="OpenAISample") + + elif event.type == "Microsoft.Communication.RecognizeFailed": + resultInformation = event.data['resultInformation'] + reasonCode = resultInformation['subCode'] + context=event.data['operationContext'] + global max_retry + if reasonCode == 8510 and 0 < max_retry: + handle_recognize(TIMEOUT_SILENCE_PROMPT,caller_id,call_connection_id) + max_retry -= 1 + else: + handle_play(call_connection_id,GOODBYE_PROMPT, GOODBYE_CONTEXT) + + elif event.type == "Microsoft.Communication.PlayCompleted": + context=event.data['operationContext'] + if context.lower() == TRANSFER_FAILED_CONTEXT.lower() or context.lower() == GOODBYE_CONTEXT.lower() : + handle_hangup(call_connection_id) + elif context.lower() == CONNECT_AGENT_CONTEXT.lower(): + if not AGENT_PHONE_NUMBER or AGENT_PHONE_NUMBER.isspace(): + app.logger.info(f"Agent phone number is empty") + handle_play(call_connection_id=call_connection_id,text_to_play=AGENT_PHONE_NUMBER_EMPTY_PROMPT) + else: + app.logger.info(f"Initializing the Call transfer...") + transfer_destination=PhoneNumberIdentifier(AGENT_PHONE_NUMBER) + call_connection_client =call_automation_client.get_call_connection(call_connection_id=call_connection_id) + call_connection_client.transfer_call_to_participant(target_participant=transfer_destination) + app.logger.info(f"Transfer call initiated: {context}") + + elif event.type == "Microsoft.Communication.CallTransferAccepted": + app.logger.info(f"Call transfer accepted event received for connection id: {call_connection_id}") + + elif event.type == "Microsoft.Communication.CallTransferFailed": + app.logger.info(f"Call transfer failed event received for connection id: {call_connection_id}") + resultInformation = event.data['resultInformation'] + sub_code = resultInformation['subCode'] + # check for message extraction and code + app.logger.info(f"Encountered error during call transfer, message=, code=, subCode={sub_code}") + handle_play(call_connection_id=call_connection_id,text_to_play=CALLTRANSFER_FAILURE_PROMPT, context=TRANSFER_FAILED_CONTEXT) + return Response(status=200) + except Exception as ex: + app.logger.info("error in event handling") + +@app.route("/") +def hello(): + return "Hello ACS CallAutomation!..test" + +if __name__ == '__main__': + app.logger.setLevel(INFO) + app.run(port=8080) diff --git a/callautomation-ga3-test-app/readme.md b/callautomation-ga3-test-app/readme.md new file mode 100644 index 0000000..9d741ad --- /dev/null +++ b/callautomation-ga3-test-app/readme.md @@ -0,0 +1,61 @@ +|page_type| languages |products +|---|-----------------------------------------|---| +|sample|
Python
|
azure<br>azure-communication-services
| + +# Call Automation - Quick Start Sample + +This is a sample application demonstrated during Microsoft Build 2023. It highlights an integration of Azure Communication Services with Azure OpenAI Service to enable intelligent conversational agents. + +## Prerequisites + +- Create an Azure account with an active subscription. For details, see [Create an account for free](https://azure.microsoft.com/free/) +- Create an Azure Communication Services resource. For details, see [Create an Azure Communication Resource](https://docs.microsoft.com/azure/communication-services/quickstarts/create-communication-resource). You'll need to record your resource **connection string** for this sample. +- An Calling-enabled telephone number. [Get a phone number](https://learn.microsoft.com/en-us/azure/communication-services/quickstarts/telephony/get-phone-number?tabs=windows&pivots=platform-azp). +- Azure Dev Tunnels CLI. For details, see [Enable dev tunnel](https://docs.tunnels.api.visualstudio.com/cli) +- Create an Azure Cognitive Services resource. For details, see [Create an Azure Cognitive Services Resource](https://learn.microsoft.com/en-us/azure/cognitive-services/cognitive-services-apis-create-account) +- An Azure OpenAI Resource and Deployed Model. See [instructions](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal). +- Create and host a Azure Dev Tunnel. Instructions [here](https://learn.microsoft.com/en-us/azure/developer/dev-tunnels/get-started) +- [Python](https://www.python.org/downloads/) 3.7 or above. + +## Before running the sample for the first time + +1. Open an instance of PowerShell, Windows Terminal, Command Prompt or equivalent and navigate to the directory that you would like to clone the sample to. +2. git clone `https://github.com/Azure-Samples/communication-services-python-quickstarts.git`. +3. Navigate to `callautomation-openai-sample` folder and open `main.py` file. + +### Setup the Python environment + +Create and activate python virtual environment and install required packages using following command +``` +pip install -r requirements.txt +``` + +### Setup and host your Azure DevTunnel + +[Azure DevTunnels](https://learn.microsoft.com/en-us/azure/developer/dev-tunnels/overview) is an Azure service that enables you to share local web services hosted on the internet. Use the commands below to connect your local development environment to the public internet. This creates a tunnel with a persistent endpoint URL and which allows anonymous access. We will then use this endpoint to notify your application of calling events from the ACS Call Automation service. + +```bash +devtunnel create --allow-anonymous +devtunnel port create -p 8080 +devtunnel host +``` + +### Configuring application + +Open `main.py` file to configure the following settings + +1. - `CALLBACK_URI_HOST`: your dev tunnel endpoint +2. - `COGNITIVE_SERVICE_ENDPOINT`: The Cognitive Services endpoint +3. - `ACS_CONNECTION_STRING`: Azure Communication Service resource's connection string. +4. - `AZURE_OPENAI_SERVICE_KEY`: Open AI's Service Key +5. - `AZURE_OPENAI_SERVICE_ENDPOINT`: Open AI's Service Endpoint +6. - `AZURE_OPENAI_DEPLOYMENT_MODEL_NAME`: Open AI's Model name +6. - `AGENT_PHONE_NUMBER`: Agent Phone Number to transfer call + +## Run app locally + +1. Navigate to `callautomation-openai-sample` folder and run `main.py` in debug mode or use command `python ./main.py` to run it from PowerShell, Command Prompt or Unix Terminal +2. 
Browser should pop up with the below page. If not navigate it to `http://localhost:8080/` or your dev tunnel url. +3. Register an EventGrid Webhook for the IncomingCall Event that points to your DevTunnel URI. Instructions [here](https://learn.microsoft.com/en-us/azure/communication-services/concepts/call-automation/incoming-call-notification). + +Once that's completed you should have a running application. The best way to test this is to place a call to your ACS phone number and talk to your intelligent agent. diff --git a/callautomation-ga3-test-app/requirements.txt b/callautomation-ga3-test-app/requirements.txt new file mode 100644 index 0000000..94dc155 --- /dev/null +++ b/callautomation-ga3-test-app/requirements.txt @@ -0,0 +1,4 @@ +Flask>=2.3.2 +azure-eventgrid==4.11.0 +azure-communication-callautomation==1.1.0 +openai==0.28.1 \ No newline at end of file From 4d8c075c920800f26be0bbf769c008c5472722f1 Mon Sep 17 00:00:00 2001 From: Durgesh Suryawanshi Date: Wed, 17 Apr 2024 14:42:03 +0530 Subject: [PATCH 2/6] ga3 app initial changes --- callautomation-ga3-test-app/main.py | 195 ++++-------------- callautomation-ga3-test-app/requirements.txt | 3 +- .../template/index.html | 18 ++ 3 files changed, 58 insertions(+), 158 deletions(-) create mode 100644 callautomation-ga3-test-app/template/index.html diff --git a/callautomation-ga3-test-app/main.py b/callautomation-ga3-test-app/main.py index 2d6c37a..60849b0 100644 --- a/callautomation-ga3-test-app/main.py +++ b/callautomation-ga3-test-app/main.py @@ -4,112 +4,59 @@ from urllib.parse import urlencode, urljoin from azure.eventgrid import EventGridEvent, SystemEventNames import requests -from flask import Flask, Response, request, json +from flask import Flask, Response, request, json,render_template,redirect from logging import INFO import re from azure.communication.callautomation import ( CallAutomationClient, PhoneNumberIdentifier, RecognizeInputType, - TextSource + TextSource, + CommunicationUserIdentifier ) from azure.core.messaging import CloudEvent -import openai -from openai.api_resources import ( - ChatCompletion -) +COMMUNICATION_USR_ID = "" # Your ACS resource connection string -ACS_CONNECTION_STRING = "" +ACS_CONNECTION_STRING = "" # Cognitive service endpoint -COGNITIVE_SERVICE_ENDPOINT="" - -# Cognitive service endpoint -AZURE_OPENAI_SERVICE_KEY = "" - -# Open AI service endpoint -AZURE_OPENAI_SERVICE_ENDPOINT="" - -# Azure Open AI Deployment Model Name -AZURE_OPENAI_DEPLOYMENT_MODEL_NAME="" - -# Azure Open AI Deployment Model -AZURE_OPENAI_DEPLOYMENT_MODEL="gpt-3.5-turbo" +COGNITIVE_SERVICE_ENDPOINT="" # Agent Phone Number -AGENT_PHONE_NUMBER="" +TARGET_PHONE_NUMBER="" + +ACS_PHONE_NUMBER="" # Callback events URI to handle callback events. -CALLBACK_URI_HOST = "" -CALLBACK_EVENTS_URI = CALLBACK_URI_HOST + "/api/callbacks" +CALLBACK_URI_HOST = "" -ANSWER_PROMPT_SYSTEM_TEMPLATE = """ - You are an assistant designed to answer the customer query and analyze the sentiment score from the customer tone. - You also need to determine the intent of the customer query and classify it into categories such as sales, marketing, shopping, etc. - Use a scale of 1-10 (10 being highest) to rate the sentiment score. - Use the below format, replacing the text in brackets with the result. 
Do not include the brackets in the output: - Content:[Answer the customer query briefly and clearly in two lines and ask if there is anything else you can help with] - Score:[Sentiment score of the customer tone] - Intent:[Determine the intent of the customer query] - Category:[Classify the intent into one of the categories] - """ +CALLBACK_EVENTS_URI = CALLBACK_URI_HOST + "/api/callbacks" -HELLO_PROMPT = "Hello, thank you for calling! How can I help you today?" -TIMEOUT_SILENCE_PROMPT = "I am sorry, I did not hear anything. If you need assistance, please let me know how I can help you," -GOODBYE_PROMPT = "Thank you for calling! I hope I was able to assist you. Have a great day!" -CONNECT_AGENT_PROMPT = "I'm sorry, I was not able to assist you with your request. Let me transfer you to an agent who can help you further. Please hold the line, and I willl connect you shortly." -CALLTRANSFER_FAILURE_PROMPT = "It looks like I can not connect you to an agent right now, but we will get the next available agent to call you back as soon as possible." -AGENT_PHONE_NUMBER_EMPTY_PROMPT = "I am sorry, we are currently experiencing high call volumes and all of our agents are currently busy. Our next available agent will call you back as soon as possible." -END_CALL_PHRASE_TO_CONNECT_AGENT = "Sure, please stay on the line. I am going to transfer you to an agent." -TRANSFER_FAILED_CONTEXT = "TransferFailed" -CONNECT_AGENT_CONTEXT = "ConnectAgent" -GOODBYE_CONTEXT = "Goodbye" +TEMPLATE_FILES_PATH = "template" -CHAT_RESPONSE_EXTRACT_PATTERN = r"\s*Content:(.*)\s*Score:(.*\d+)\s*Intent:(.*)\s*Category:(.*)" +HELLO_PROMPT = "Welcome to the Contoso Utilities. Thank you!" call_automation_client = CallAutomationClient.from_connection_string(ACS_CONNECTION_STRING) recording_id = None recording_chunks_location = [] -max_retry = 2 -openai.api_key = AZURE_OPENAI_SERVICE_KEY -openai.api_base = AZURE_OPENAI_SERVICE_ENDPOINT # your endpoint should look like the following https://YOUR_RESOURCE_NAME.openai.azure.com/ -openai.api_type = 'azure' -openai.api_version = '2023-05-15' # this may change in the future +app = Flask(__name__, + template_folder=TEMPLATE_FILES_PATH) -app = Flask(__name__) - -def get_chat_completions_async(system_prompt,user_prompt): - openai.api_key = AZURE_OPENAI_SERVICE_KEY - openai.api_base = AZURE_OPENAI_SERVICE_ENDPOINT # your endpoint should look like the following https://YOUR_RESOURCE_NAME.openai.azure.com/ - openai.api_type = 'azure' - openai.api_version = '2023-05-15' # this may change in the future - - # Define your chat completions request - chat_request = [ - {"role": "system", "content": f"{system_prompt}"}, - {"role": "user", "content": f"In less than 200 characters: respond to this question: {user_prompt}?"} - ] - - global response_content - try: - response = ChatCompletion.create(model=AZURE_OPENAI_DEPLOYMENT_MODEL,deployment_id=AZURE_OPENAI_DEPLOYMENT_MODEL_NAME, messages=chat_request,max_tokens = 1000) - except ex: - app.logger.info("error in openai api call : %s",ex) - - # Extract the response content - if response is not None : - response_content = response['choices'][0]['message']['content'] - else : - response_content="" - return response_content - -def get_chat_gpt_response(speech_input): - return get_chat_completions_async(ANSWER_PROMPT_SYSTEM_TEMPLATE,speech_input) +@app.route('/createCall') +def outbound_call_handler(): + target_participant = CommunicationUserIdentifier(COMMUNICATION_USR_ID) + # source_caller = PhoneNumberIdentifier(ACS_PHONE_NUMBER) + 
call_connection_properties = call_automation_client.create_call(target_participant, + CALLBACK_URI_HOST, + cognitive_services_endpoint=COGNITIVE_SERVICE_ENDPOINT + ) + app.logger.info("Created call with connection id: %s", call_connection_properties.call_connection_id) + return redirect("/") def handle_recognize(replyText,callerId,call_connection_id,context=""): play_source = TextSource(text=replyText, voice_name="en-US-NancyNeural") @@ -129,25 +76,6 @@ def handle_play(call_connection_id, text_to_play, context): def handle_hangup(call_connection_id): call_automation_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True) -def detect_escalate_to_agent_intent(speech_text, logger): - return has_intent_async(user_query=speech_text, intent_description="talk to agent", logger=logger) - -def has_intent_async(user_query, intent_description, logger): - is_match=False - system_prompt = "You are a helpful assistant" - combined_prompt = f"In 1 word: does {user_query} have a similar meaning as {intent_description}?" - #combined_prompt = base_user_prompt.format(user_query, intent_description) - response = get_chat_completions_async(system_prompt, combined_prompt) - if "yes" in response.lower(): - is_match =True - logger.info(f"OpenAI results: is_match={is_match}, customer_query='{user_query}', intent_description='{intent_description}'") - return is_match - -def get_sentiment_score(sentiment_score): - pattern = r"(\d)+" - regex = re.compile(pattern) - match = regex.search(sentiment_score) - return int(match.group()) if match else -1 @app.route("/api/incomingCall", methods=['POST']) def incoming_call_handler(): @@ -198,67 +126,20 @@ def handle_callback(contextId): app.logger.info("call connected : data=%s", event.data) if event.type == "Microsoft.Communication.CallConnected": - handle_recognize(HELLO_PROMPT, - caller_id,call_connection_id, - context="GetFreeFormText") + app.logger.info("Call connected") + handle_play(call_connection_id,HELLO_PROMPT,"helloContext") elif event.type == "Microsoft.Communication.RecognizeCompleted": - if event.data['recognitionType'] == "speech": - speech_text = event.data['speechResult']['speech']; - app.logger.info("Recognition completed, speech_text =%s", - speech_text); - if speech_text is not None and len(speech_text) > 0: - if detect_escalate_to_agent_intent(speech_text=speech_text,logger=app.logger): - handle_play(call_connection_id=call_connection_id,text_to_play=END_CALL_PHRASE_TO_CONNECT_AGENT,context=CONNECT_AGENT_CONTEXT) - else: - chat_gpt_response= get_chat_gpt_response(speech_text) - app.logger.info(f"Chat GPT response:{chat_gpt_response}") - regex = re.compile(CHAT_RESPONSE_EXTRACT_PATTERN) - match = regex.search(chat_gpt_response) - if match: - answer = match.group(1) - sentiment_score = match.group(2).strip() - intent = match.group(3) - category = match.group(4) - app.logger.info(f"Chat GPT Answer={answer}, Sentiment Rating={sentiment_score}, Intent={intent}, Category={category}") - score=get_sentiment_score(sentiment_score) - app.logger.info(f"Score={score}") - if -1 < score < 5: - app.logger.info(f"Score is less than 5") - handle_play(call_connection_id=call_connection_id,text_to_play=CONNECT_AGENT_PROMPT,context=CONNECT_AGENT_CONTEXT) - else: - app.logger.info(f"Score is more than 5") - handle_recognize(answer,caller_id,call_connection_id,context="OpenAISample") - else: - app.logger.info("No match found") - handle_recognize(chat_gpt_response,caller_id,call_connection_id,context="OpenAISample") + app.logger.info("Recognition completed") 
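+            # Recognition results are only logged at this point; no media action is taken on completion.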
elif event.type == "Microsoft.Communication.RecognizeFailed": resultInformation = event.data['resultInformation'] reasonCode = resultInformation['subCode'] - context=event.data['operationContext'] - global max_retry - if reasonCode == 8510 and 0 < max_retry: - handle_recognize(TIMEOUT_SILENCE_PROMPT,caller_id,call_connection_id) - max_retry -= 1 - else: - handle_play(call_connection_id,GOODBYE_PROMPT, GOODBYE_CONTEXT) - + context=event.data['operationContext'] elif event.type == "Microsoft.Communication.PlayCompleted": - context=event.data['operationContext'] - if context.lower() == TRANSFER_FAILED_CONTEXT.lower() or context.lower() == GOODBYE_CONTEXT.lower() : - handle_hangup(call_connection_id) - elif context.lower() == CONNECT_AGENT_CONTEXT.lower(): - if not AGENT_PHONE_NUMBER or AGENT_PHONE_NUMBER.isspace(): - app.logger.info(f"Agent phone number is empty") - handle_play(call_connection_id=call_connection_id,text_to_play=AGENT_PHONE_NUMBER_EMPTY_PROMPT) - else: - app.logger.info(f"Initializing the Call transfer...") - transfer_destination=PhoneNumberIdentifier(AGENT_PHONE_NUMBER) - call_connection_client =call_automation_client.get_call_connection(call_connection_id=call_connection_id) - call_connection_client.transfer_call_to_participant(target_participant=transfer_destination) - app.logger.info(f"Transfer call initiated: {context}") - + context=event.data['operationContext'] + app.logger.info(context) + handle_hangup(call_connection_id) elif event.type == "Microsoft.Communication.CallTransferAccepted": app.logger.info(f"Call transfer accepted event received for connection id: {call_connection_id}") @@ -267,15 +148,17 @@ def handle_callback(contextId): resultInformation = event.data['resultInformation'] sub_code = resultInformation['subCode'] # check for message extraction and code - app.logger.info(f"Encountered error during call transfer, message=, code=, subCode={sub_code}") - handle_play(call_connection_id=call_connection_id,text_to_play=CALLTRANSFER_FAILURE_PROMPT, context=TRANSFER_FAILED_CONTEXT) + app.logger.info(f"Encountered error during call transfer, message=, code=, subCode={sub_code}") + elif event.type == "Microsoft.Communication.CallDisconnected": + app.logger.info(f"Received CallDisconnected event for connection id: {call_connection_id}") return Response(status=200) except Exception as ex: app.logger.info("error in event handling") -@app.route("/") -def hello(): - return "Hello ACS CallAutomation!..test" +# GET endpoint to render the menus +@app.route('/') +def index_handler(): + return render_template("index.html") if __name__ == '__main__': app.logger.setLevel(INFO) diff --git a/callautomation-ga3-test-app/requirements.txt b/callautomation-ga3-test-app/requirements.txt index 94dc155..574abca 100644 --- a/callautomation-ga3-test-app/requirements.txt +++ b/callautomation-ga3-test-app/requirements.txt @@ -1,4 +1,3 @@ Flask>=2.3.2 azure-eventgrid==4.11.0 -azure-communication-callautomation==1.1.0 -openai==0.28.1 \ No newline at end of file +azure-communication-callautomation==1.1.0 \ No newline at end of file diff --git a/callautomation-ga3-test-app/template/index.html b/callautomation-ga3-test-app/template/index.html new file mode 100644 index 0000000..4fdd830 --- /dev/null +++ b/callautomation-ga3-test-app/template/index.html @@ -0,0 +1,18 @@ + + + + + GA3 Call automation test app + + + +

+    <h1>Azure Communication Services</h1>
+    <h3>GA3 Call automation test app</h3>
+    <!-- link target assumed: the /createCall route added in this change -->
+    <a href="/createCall">Create Call</a>
+ + + \ No newline at end of file From e52c4a26639c309288ff37e973377f50fc0250cc Mon Sep 17 00:00:00 2001 From: Durgesh Suryawanshi Date: Wed, 17 Apr 2024 18:35:32 +0530 Subject: [PATCH 3/6] Added recording scenarios, pause on start, byos. --- callautomation-ga3-test-app/main.py | 134 ++++++++++++++++-- callautomation-ga3-test-app/readme.md | 8 +- .../template/index.html | 2 + 3 files changed, 128 insertions(+), 16 deletions(-) diff --git a/callautomation-ga3-test-app/main.py b/callautomation-ga3-test-app/main.py index 60849b0..a49a039 100644 --- a/callautomation-ga3-test-app/main.py +++ b/callautomation-ga3-test-app/main.py @@ -1,5 +1,6 @@ from pyexpat import model +import time import uuid from urllib.parse import urlencode, urljoin from azure.eventgrid import EventGridEvent, SystemEventNames @@ -12,7 +13,13 @@ PhoneNumberIdentifier, RecognizeInputType, TextSource, - CommunicationUserIdentifier + CommunicationUserIdentifier, + ServerCallLocator, + RecordingChannel, + RecordingContent, + RecordingFormat, + AzureBlobContainerRecordingStorage, + AzureCommunicationsRecordingStorage ) from azure.core.messaging import CloudEvent @@ -34,16 +41,18 @@ CALLBACK_EVENTS_URI = CALLBACK_URI_HOST + "/api/callbacks" - TEMPLATE_FILES_PATH = "template" +BRING_YOUR_STORAGE_URL="" + +IS_BYOS = False + +IS_PAUSE_ON_START = False + HELLO_PROMPT = "Welcome to the Contoso Utilities. Thank you!" call_automation_client = CallAutomationClient.from_connection_string(ACS_CONNECTION_STRING) -recording_id = None -recording_chunks_location = [] - app = Flask(__name__, template_folder=TEMPLATE_FILES_PATH) @@ -68,14 +77,38 @@ def handle_recognize(replyText,callerId,call_connection_id,context=""): operation_context=context) app.logger.info("handle_recognize : data=%s",recognize_result) -def handle_play(call_connection_id, text_to_play, context): +def handle_play(call_connection_id, text_to_play, context): play_source = TextSource(text=text_to_play, voice_name= "en-US-NancyNeural") call_automation_client.get_call_connection(call_connection_id).play_media_to_all(play_source, operation_context=context) def handle_hangup(call_connection_id): call_automation_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True) - + +def start_recording(server_call_id): + global recording_storage + if IS_BYOS: + recording_storage=AzureBlobContainerRecordingStorage(BRING_YOUR_STORAGE_URL) + else: + recording_storage=AzureCommunicationsRecordingStorage() + + recording_result = call_automation_client.start_recording( + call_locator=ServerCallLocator(server_call_id), + recording_content_type = RecordingContent.Audio, + recording_channel_type = RecordingChannel.Unmixed, + recording_format_type = RecordingFormat.Wav, + recording_storage= recording_storage, + pause_on_start = IS_PAUSE_ON_START + ) + global recording_id + recording_id=recording_result.recording_id + app.logger.info("Recording started...") + app.logger.info("Recording Id --> %s", recording_id) + +def get_recording_state(recordingId): + recording_state_result = call_automation_client.get_recording_properties(recording_id) + app.logger.info("Recording State --> %s", recording_state_result.recording_state) + return recording_state_result.recording_state @app.route("/api/incomingCall", methods=['POST']) def incoming_call_handler(): @@ -113,12 +146,12 @@ def incoming_call_handler(): @app.route("/api/callbacks/", methods=["POST"]) def handle_callback(contextId): try: - global caller_id , call_connection_id - app.logger.info("Request Json: %s", request.json) + global 
caller_id , call_connection_id, server_call_id + # app.logger.info("Request Json: %s", request.json) for event_dict in request.json: event = CloudEvent.from_dict(event_dict) call_connection_id = event.data['callConnectionId'] - + app.logger.info("%s event received for call connection id: %s", event.type, call_connection_id) caller_id = request.args.get("callerId").strip() if "+" not in caller_id: @@ -127,6 +160,13 @@ def handle_callback(contextId): app.logger.info("call connected : data=%s", event.data) if event.type == "Microsoft.Communication.CallConnected": app.logger.info("Call connected") + server_call_id = event.data["serverCallId"] + app.logger.info("Server Call Id --> %s", server_call_id) + app.logger.info("Is pause on start --> %s", IS_PAUSE_ON_START) + app.logger.info("Bring Your Own Storage --> %s", IS_BYOS) + if IS_BYOS: + app.logger.info("Bring Your Own Storage URL --> %s", BRING_YOUR_STORAGE_URL) + start_recording(server_call_id) handle_play(call_connection_id,HELLO_PROMPT,"helloContext") elif event.type == "Microsoft.Communication.RecognizeCompleted": @@ -139,6 +179,26 @@ def handle_callback(contextId): elif event.type == "Microsoft.Communication.PlayCompleted": context=event.data['operationContext'] app.logger.info(context) + + recording_state = get_recording_state(recording_id) + if recording_state == "active": + call_automation_client.pause_recording(recording_id) + time.sleep(5) + get_recording_state(recording_id) + app.logger.info("Recording is paused") + time.sleep(5) + call_automation_client.resume_recording(recording_id) + time.sleep(5) + get_recording_state(recording_id) + app.logger.info("Recording is resumed") + else: + time.sleep(5) + call_automation_client.resume_recording(recording_id) + time.sleep(5) + get_recording_state(recording_id) + time.sleep(5) + call_automation_client.stop_recording(recording_id) + app.logger.info("Recording is stopped") handle_hangup(call_connection_id) elif event.type == "Microsoft.Communication.CallTransferAccepted": app.logger.info(f"Call transfer accepted event received for connection id: {call_connection_id}") @@ -155,6 +215,60 @@ def handle_callback(contextId): except Exception as ex: app.logger.info("error in event handling") +@app.route('/api/recordingFileStatus', methods=['POST']) +def recording_file_status(): + try: + for event_dict in request.json: + event = EventGridEvent.from_dict(event_dict) + if event.event_type == SystemEventNames.EventGridSubscriptionValidationEventName: + code = event.data['validationCode'] + if code: + data = {"validationResponse": code} + app.logger.info("Successfully Subscribed EventGrid.ValidationEvent --> " + str(data)) + return Response(response=str(data), status=200) + + if event.event_type == SystemEventNames.AcsRecordingFileStatusUpdatedEventName: + acs_recording_file_status_updated_event_data = event.data + acs_recording_chunk_info_properties = acs_recording_file_status_updated_event_data['recordingStorageInfo']['recordingChunks'][0] + app.logger.info("acsRecordingChunkInfoProperties response data --> " + str(acs_recording_chunk_info_properties)) + global content_location, metadata_location, delete_location + content_location = acs_recording_chunk_info_properties['contentLocation'] + metadata_location = acs_recording_chunk_info_properties['metadataLocation'] + delete_location = acs_recording_chunk_info_properties['deleteLocation'] + app.logger.info("CONTENT LOCATION --> %s", content_location) + app.logger.info("METADATA LOCATION --> %s", metadata_location) + app.logger.info("DELETE LOCATION 
--> %s", delete_location) + return Response(response="Ok") + + except Exception as ex: + app.logger.error( "Failed to get recording file") + return Response(response='Failed to get recording file', status=400) + +@app.route('/download') +def download_recording(): + try: + app.logger.info("Content location : %s", content_location) + recording_data = call_automation_client.download_recording(content_location) + with open("Recording_File.wav", "wb") as binary_file: + binary_file.write(recording_data.read()) + return redirect("/") + except Exception as ex: + app.logger.info("Failed to download recording --> " + str(ex)) + return Response(text=str(ex), status=500) + +@app.route('/downloadMetadata') +def download_metadata(): + try: + app.logger.info("Content location : %s", content_location) + recording_data = call_automation_client.download_recording(metadata_location) + with open("Recording_metadata.json", "wb") as binary_file: + binary_file.write(recording_data.read()) + return redirect("/") + except Exception as ex: + app.logger.info("Failed to download meatadata --> " + str(ex)) + return Response(text=str(ex), status=500) + + # GET endpoint to render the menus @app.route('/') def index_handler(): diff --git a/callautomation-ga3-test-app/readme.md b/callautomation-ga3-test-app/readme.md index 9d741ad..0f19e58 100644 --- a/callautomation-ga3-test-app/readme.md +++ b/callautomation-ga3-test-app/readme.md @@ -13,7 +13,6 @@ This is a sample application demonstrated during Microsoft Build 2023. It highli - An Calling-enabled telephone number. [Get a phone number](https://learn.microsoft.com/en-us/azure/communication-services/quickstarts/telephony/get-phone-number?tabs=windows&pivots=platform-azp). - Azure Dev Tunnels CLI. For details, see [Enable dev tunnel](https://docs.tunnels.api.visualstudio.com/cli) - Create an Azure Cognitive Services resource. For details, see [Create an Azure Cognitive Services Resource](https://learn.microsoft.com/en-us/azure/cognitive-services/cognitive-services-apis-create-account) -- An Azure OpenAI Resource and Deployed Model. See [instructions](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal). - Create and host a Azure Dev Tunnel. Instructions [here](https://learn.microsoft.com/en-us/azure/developer/dev-tunnels/get-started) - [Python](https://www.python.org/downloads/) 3.7 or above. @@ -47,14 +46,11 @@ Open `main.py` file to configure the following settings 1. - `CALLBACK_URI_HOST`: your dev tunnel endpoint 2. - `COGNITIVE_SERVICE_ENDPOINT`: The Cognitive Services endpoint 3. - `ACS_CONNECTION_STRING`: Azure Communication Service resource's connection string. -4. - `AZURE_OPENAI_SERVICE_KEY`: Open AI's Service Key -5. - `AZURE_OPENAI_SERVICE_ENDPOINT`: Open AI's Service Endpoint -6. - `AZURE_OPENAI_DEPLOYMENT_MODEL_NAME`: Open AI's Model name -6. - `AGENT_PHONE_NUMBER`: Agent Phone Number to transfer call + ## Run app locally -1. Navigate to `callautomation-openai-sample` folder and run `main.py` in debug mode or use command `python ./main.py` to run it from PowerShell, Command Prompt or Unix Terminal +1. Navigate to `callautomation-ga3-test-app` folder and run `main.py` in debug mode or use command `python ./main.py` to run it from PowerShell, Command Prompt or Unix Terminal 2. Browser should pop up with the below page. If not navigate it to `http://localhost:8080/` or your dev tunnel url. 3. Register an EventGrid Webhook for the IncomingCall Event that points to your DevTunnel URI. 
Instructions [here](https://learn.microsoft.com/en-us/azure/communication-services/concepts/call-automation/incoming-call-notification). diff --git a/callautomation-ga3-test-app/template/index.html b/callautomation-ga3-test-app/template/index.html index 4fdd830..85158c2 100644 --- a/callautomation-ga3-test-app/template/index.html +++ b/callautomation-ga3-test-app/template/index.html @@ -11,6 +11,8 @@

     <h3>GA3 Call automation test app</h3>
 
+    <!-- links assumed: targets are the /download and /downloadMetadata routes added in this change -->
+    <a href="/download">Download Recording</a>
+    <a href="/downloadMetadata">Download Recording Metadata</a>
From cc4e909055ebb055a255bd3c0fe0b56d2d9e26ef Mon Sep 17 00:00:00 2001 From: Durgesh Suryawanshi Date: Thu, 18 Apr 2024 19:31:53 +0530 Subject: [PATCH 4/6] added media scenarios --- callautomation-ga3-test-app/main.py | 275 ++++++++++++++++-- .../template/index.html | 2 + 2 files changed, 248 insertions(+), 29 deletions(-) diff --git a/callautomation-ga3-test-app/main.py b/callautomation-ga3-test-app/main.py index a49a039..6c72a14 100644 --- a/callautomation-ga3-test-app/main.py +++ b/callautomation-ga3-test-app/main.py @@ -19,7 +19,9 @@ RecordingContent, RecordingFormat, AzureBlobContainerRecordingStorage, - AzureCommunicationsRecordingStorage + AzureCommunicationsRecordingStorage, + RecognitionChoice, + DtmfTone ) from azure.core.messaging import CloudEvent @@ -34,8 +36,12 @@ # Agent Phone Number TARGET_PHONE_NUMBER="" +TARGET_PHONE_NUMBER_2="" + ACS_PHONE_NUMBER="" +ACS_PHONE_NUMBER_2="" + # Callback events URI to handle callback events. CALLBACK_URI_HOST = "" @@ -49,15 +55,27 @@ IS_PAUSE_ON_START = False +IS_REJECT_CALL = False + +IS_REDIRECT_CALL = False + +IS_TRANSFER_CALL = False + +IS_OUTBOUND_CALL = False + HELLO_PROMPT = "Welcome to the Contoso Utilities. Thank you!" +PSTN_USER_PROMPT = "Hello this is contoso recognition test please confirm or cancel to proceed further." + +DTMF_PROMPT = "Thank you for the update. Please type one two three four on your keypad to close call." + call_automation_client = CallAutomationClient.from_connection_string(ACS_CONNECTION_STRING) app = Flask(__name__, template_folder=TEMPLATE_FILES_PATH) @app.route('/createCall') -def outbound_call_handler(): +def create_call_handler(): target_participant = CommunicationUserIdentifier(COMMUNICATION_USR_ID) # source_caller = PhoneNumberIdentifier(ACS_PHONE_NUMBER) call_connection_properties = call_automation_client.create_call(target_participant, @@ -67,20 +85,71 @@ def outbound_call_handler(): app.logger.info("Created call with connection id: %s", call_connection_properties.call_connection_id) return redirect("/") -def handle_recognize(replyText,callerId,call_connection_id,context=""): - play_source = TextSource(text=replyText, voice_name="en-US-NancyNeural") - recognize_result=call_automation_client.get_call_connection(call_connection_id).start_recognizing_media( - input_type=RecognizeInputType.SPEECH, - target_participant=PhoneNumberIdentifier(callerId), - end_silence_timeout=10, - play_prompt=play_source, - operation_context=context) - app.logger.info("handle_recognize : data=%s",recognize_result) +@app.route('/createPstnCall') +def create_pstn_call(): + target_participant = PhoneNumberIdentifier(ACS_PHONE_NUMBER) + source_caller = PhoneNumberIdentifier(ACS_PHONE_NUMBER_2) + call_connection_properties = call_automation_client.create_call(target_participant, + CALLBACK_EVENTS_URI, + cognitive_services_endpoint=COGNITIVE_SERVICE_ENDPOINT, + source_caller_id_number=source_caller) + app.logger.info("Created pstn call with connection id: %s", call_connection_properties.call_connection_id) + return redirect("/") + +@app.route('/outboundCall') +def create_outbound_call(): + target_participant = PhoneNumberIdentifier(TARGET_PHONE_NUMBER) + source_caller = PhoneNumberIdentifier(ACS_PHONE_NUMBER) + call_connection_properties = call_automation_client.create_call(target_participant, + CALLBACK_EVENTS_URI, + cognitive_services_endpoint=COGNITIVE_SERVICE_ENDPOINT, + source_caller_id_number=source_caller) + app.logger.info("Created outbound call with connection id: %s", call_connection_properties.call_connection_id) + 
return redirect("/") + +def handle_recognize(playText,callerId,call_connection_id,context="",isDtmf=False): + choices = [ + RecognitionChoice( + label="Confirm", + phrases=[ "Confirm", "First", "One" ], + tone=DtmfTone.ONE + ), + RecognitionChoice( + label="Cancel", + phrases=[ "Cancel", "Second", "Two" ], + tone=DtmfTone.TWO + )] + + if isDtmf: + play_source = TextSource(text=playText, voice_name="en-US-NancyNeural") + recognize_result=call_automation_client.get_call_connection(call_connection_id).start_recognizing_media( + input_type=RecognizeInputType.DTMF, + target_participant=PhoneNumberIdentifier(callerId), + end_silence_timeout=10, + dtmf_max_tones_to_collect=4, + play_prompt=play_source, + operation_context=context) + else: + play_source = TextSource(text=playText, voice_name="en-US-NancyNeural") + recognize_result=call_automation_client.get_call_connection(call_connection_id).start_recognizing_media( + input_type=RecognizeInputType.CHOICES, + target_participant=PhoneNumberIdentifier(callerId), + choices=choices, + end_silence_timeout=10, + play_prompt=play_source, + operation_context=context) def handle_play(call_connection_id, text_to_play, context): play_source = TextSource(text=text_to_play, voice_name= "en-US-NancyNeural") call_automation_client.get_call_connection(call_connection_id).play_media_to_all(play_source, - operation_context=context) + operation_context=context, + loop=False) + # call_automation_client.get_call_connection(call_connection_id).play_media(play_source=play_source, + # play_to=PhoneNumberIdentifier(TARGET_PHONE_NUMBER), + # operation_context=context, + # loop=True) + + #call_automation_client.get_call_connection(call_connection_id).cancel_all_media_operations() def handle_hangup(call_connection_id): call_automation_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True) @@ -110,6 +179,20 @@ def get_recording_state(recordingId): app.logger.info("Recording State --> %s", recording_state_result.recording_state) return recording_state_result.recording_state +def start_continuous_dtmf(call_connection_id): + call_automation_client.get_call_connection(call_connection_id).start_continuous_dtmf_recognition(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER)) + app.logger.info("Continuous Dtmf recognition started. press 1 on dialpad.") + +def stop_continuous_dtmf(call_connection_id): + call_automation_client.get_call_connection(call_connection_id).stop_continuous_dtmf_recognition(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER)) + app.logger.info("Continuous Dtmf recognition stopped. 
wait for sending dtmf tones.") + +def start_send_dtmf_tones(call_connection_id): + tones = [DtmfTone.ONE,DtmfTone.TWO] + call_automation_client.get_call_connection(call_connection_id).send_dtmf_tones(tones=tones, + target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER)) + app.logger.info("Send dtmf tone started.") + @app.route("/api/incomingCall", methods=['POST']) def incoming_call_handler(): for event_dict in request.json: @@ -135,28 +218,38 @@ def incoming_call_handler(): callback_uri = f"{CALLBACK_EVENTS_URI}/{guid}?{query_parameters}" app.logger.info("callback url: %s", callback_uri) - - answer_call_result = call_automation_client.answer_call(incoming_call_context=incoming_call_context, + + if IS_REJECT_CALL: + app.logger.info("Is Reject Call: %s", IS_REJECT_CALL) + call_automation_client.reject_call(incoming_call_context=incoming_call_context) + app.logger.info(f"Call Rejected, recject call setting is {IS_REJECT_CALL}") + elif IS_REDIRECT_CALL: + app.logger.info("Is Redirect Call: %s", IS_REDIRECT_CALL) + call_automation_client.redirect_call(incoming_call_context=incoming_call_context, + target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER)) + app.logger.info("Call redirected. Call automation has no control.") + else: + answer_call_result = call_automation_client.answer_call(incoming_call_context=incoming_call_context, cognitive_services_endpoint=COGNITIVE_SERVICE_ENDPOINT, callback_url=callback_uri) - app.logger.info("Answered call for connection id: %s", + app.logger.info("Answered call for connection id: %s", answer_call_result.call_connection_id) return Response(status=200) - +# For outbound call. +# @app.route('/api/callbacks', methods=['POST']) +# def handle_callback(): @app.route("/api/callbacks/", methods=["POST"]) def handle_callback(contextId): try: - global caller_id , call_connection_id, server_call_id + global caller_id , call_connection_id, server_call_id,call_connection_client,cor_relation_id # app.logger.info("Request Json: %s", request.json) for event_dict in request.json: event = CloudEvent.from_dict(event_dict) call_connection_id = event.data['callConnectionId'] - + cor_relation_id = event.data['correlationId'] + app.logger.info(f"***CALLCONNECTIONID*** -> {call_connection_id}") + app.logger.info(f"***CORRELATIONID*** -> {cor_relation_id}") app.logger.info("%s event received for call connection id: %s", event.type, call_connection_id) - caller_id = request.args.get("callerId").strip() - if "+" not in caller_id: - caller_id="+".strip()+caller_id.strip() - app.logger.info("call connected : data=%s", event.data) if event.type == "Microsoft.Communication.CallConnected": app.logger.info("Call connected") @@ -164,21 +257,82 @@ def handle_callback(contextId): app.logger.info("Server Call Id --> %s", server_call_id) app.logger.info("Is pause on start --> %s", IS_PAUSE_ON_START) app.logger.info("Bring Your Own Storage --> %s", IS_BYOS) + call_connection_client =call_automation_client.get_call_connection(call_connection_id=call_connection_id) if IS_BYOS: app.logger.info("Bring Your Own Storage URL --> %s", BRING_YOUR_STORAGE_URL) - start_recording(server_call_id) - handle_play(call_connection_id,HELLO_PROMPT,"helloContext") + + if IS_TRANSFER_CALL: + app.logger.info("Is Transfer Call:--> %s", IS_TRANSFER_CALL) + call_connection_client.transfer_call_to_participant(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER), + transferee=PhoneNumberIdentifier(ACS_PHONE_NUMBER_2)) + app.logger.info("Transfer call initiated.") + elif IS_OUTBOUND_CALL: + 
app.logger.info("Is Outbound Call:--> %s", IS_OUTBOUND_CALL) + app.logger.info("Outbound call connected.") + #start_continuous_dtmf(call_connection_id=call_connection_id) + #handle_play(call_connection_id,"this is loop test","outboundPlayContext") + #handle_hangup(call_connection_id) + else: + start_recording(server_call_id) + + call_connection_client.add_participant(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER), + source_caller_id_number=PhoneNumberIdentifier(ACS_PHONE_NUMBER), + operation_context="addPstnUserContext", + invitation_timeout=10) + app.logger.info("Adding PSTN participant") elif event.type == "Microsoft.Communication.RecognizeCompleted": app.logger.info("Recognition completed") - + app.logger.info("Recognize completed: data=%s", event.data) + if event.data['recognitionType'] == "dtmf": + tones = event.data['dtmfResult']['tones'] + app.logger.info("Recognition completed, tones=%s, context=%s", tones, event.data.get('operationContext')) + call_connection_client.remove_participant(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER)) + elif event.data['recognitionType'] == "choices": + labelDetected = event.data['choiceResult']['label']; + phraseDetected = event.data['choiceResult']['recognizedPhrase']; + app.logger.info("Recognition completed, labelDetected=%s, phraseDetected=%s, context=%s", labelDetected, phraseDetected, event.data.get('operationContext')); + if labelDetected == "Confirm": + app.logger.info("Moving towords dtmf test.") + handle_recognize(playText=DTMF_PROMPT, + callerId=TARGET_PHONE_NUMBER, + call_connection_id=call_connection_id, + context="recognizeDtmfContext",isDtmf=True) + else: + app.logger.info("Moving towords continuous dtmf & send dtmf tones test.") + start_continuous_dtmf(call_connection_id=call_connection_id) + elif event.data['recognitionType'] == "speech": + text = event.data['speechResult']['speech']; + app.logger.info("Recognition completed, text=%s, context=%s", text, event.data.get('operationContext')) + handle_hangup(call_connection_id=call_connection_id) + else: + app.logger.info("Recognition completed: data=%s", event.data); + elif event.type == "Microsoft.Communication.RecognizeFailed": resultInformation = event.data['resultInformation'] reasonCode = resultInformation['subCode'] - context=event.data['operationContext'] + context=event.data['operationContext'] + handle_recognize(playText="test", + callerId=TARGET_PHONE_NUMBER, + call_connection_id=call_connection_id, + context="retryRecognizeContext",isDtmf=False) + app.logger.info("Cancelling all media operations.") + call_automation_client.get_call_connection(call_connection_id).cancel_all_media_operations() + app.logger.info("cancel add participant test initiated.") + response = call_connection_client.add_participant(target_participant=PhoneNumberIdentifier(ACS_PHONE_NUMBER_2), + source_caller_id_number=PhoneNumberIdentifier(ACS_PHONE_NUMBER), + invitation_timeout=10) + app.logger.info(f"Invitation Id:--> {response.invitation_id}") + call_connection_client.cancel_add_participant_operation(response.invitation_id) elif event.type == "Microsoft.Communication.PlayCompleted": context=event.data['operationContext'] app.logger.info(context) + if context == "outboundPlayContext": + handle_hangup(call_connection_id=call_connection_id) + return + if context == "continuousDtmfPlayContext": + app.logger.info("test") + return recording_state = get_recording_state(recording_id) if recording_state == "active": @@ -207,8 +361,71 @@ def handle_callback(contextId): 
app.logger.info(f"Call transfer failed event received for connection id: {call_connection_id}") resultInformation = event.data['resultInformation'] sub_code = resultInformation['subCode'] - # check for message extraction and code - app.logger.info(f"Encountered error during call transfer, message=, code=, subCode={sub_code}") + + app.logger.info(f"Encountered error during call transfer, message=, code=, subCode={sub_code}") + elif event.type == "Microsoft.Communication.AddParticipantSucceeded": + app.logger.info(f"Received AddParticipantSucceeded event for connection id: {call_connection_id}") + if(event.data['operationContext'] == "addPstnUserContext"): + app.logger.info("PSTN user added") + participants = call_connection_client.list_participants() + app.logger.info("Participants: %s", participants) + mute_result = call_connection_client.mute_participant(CommunicationUserIdentifier(COMMUNICATION_USR_ID)) + if mute_result: + app.logger.info("Participant is muted. wating for confirming.....") + response = call_connection_client.get_participant(CommunicationUserIdentifier(COMMUNICATION_USR_ID)) + if response: + app.logger.info(f"Is participant muted: {response.is_muted}") + app.logger.info("Mute participant test completed.") + + handle_recognize(playText=PSTN_USER_PROMPT, + callerId=TARGET_PHONE_NUMBER, + call_connection_id=call_connection_id, + context="recognizeContext",isDtmf=False) + + elif event.type == "Microsoft.Communication.AddParticipantFailed": + app.logger.info(f"AddParticipantFailed event received for connection id: {call_connection_id}") + resultInformation = event.data['resultInformation'] + sub_code = resultInformation['subCode'] + handle_hangup(call_connection_id) + elif event.type == "Microsoft.Communication.CancelAddParticipantSucceeded": + app.logger.info(f"Received CancelAddParticipantSucceeded event for connection id: {call_connection_id}") + app.logger.info(f"Invitation Id:--> {event.data['invitationId']}") + app.logger.info("Cancel add participant test completed.") + handle_hangup(call_connection_id) + elif event.type == "Microsoft.Communication.CancelAddParticipantFailed": + app.logger.info(f"Received CancelAddParticipantFailed event for connection id: {call_connection_id}") + resultInformation = event.data['resultInformation'] + sub_code = resultInformation['subCode'] + handle_hangup(call_connection_id) + elif event.type == "Microsoft.Communication.ContinuousDtmfRecognitionToneReceived": + app.logger.info(f"Received ContinuousDtmfRecognitionToneReceived event for connection id: {call_connection_id}") + app.logger.info(f"Tone received:-->: {event.data['tone']}") + app.logger.info(f"Sequence Id:--> {event.data['sequenceId']}") + # handle_play(call_connection_id,HELLO_PROMPT,"continuousDtmfPlayContext") + stop_continuous_dtmf(call_connection_id=call_connection_id) + elif event.type == "Microsoft.Communication.ContinuousDtmfRecognitionToneFailed": + app.logger.info(f"Received ContinuousDtmfRecognitionToneFailed event for connection id: {call_connection_id}") + resultInformation = event.data['resultInformation'] + sub_code = resultInformation['subCode'] + handle_hangup(call_connection_id) + elif event.type == "Microsoft.Communication.ContinuousDtmfRecognitionStopped": + app.logger.info(f"Received ContinuousDtmfRecognitionStopped event for connection id: {call_connection_id}") + start_send_dtmf_tones(call_connection_id=call_connection_id) + elif event.type == "Microsoft.Communication.SendDtmfTonesCompleted": + app.logger.info(f"Received SendDtmfTonesCompleted event for 
connection id: {call_connection_id}") + call_connection_client.remove_participant(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER)) + app.logger.info(f"Send Dtmf tone completed. {TARGET_PHONE_NUMBER} will be removed from call.") + elif event.type == "Microsoft.Communication.SendDtmfTonesFailed": + app.logger.info(f"Received SendDtmfTonesFailed event for connection id: {call_connection_id}") + resultInformation = event.data['resultInformation'] + sub_code = resultInformation['subCode'] + elif event.type == "Microsoft.Communication.RemoveParticipantSucceeded": + app.logger.info(f"Received RemoveParticipantSucceeded event for connection id: {call_connection_id}") + handle_play(call_connection_id,HELLO_PROMPT,"helloContext") + elif event.type == "Microsoft.Communication.RemoveParticipantFailed": + app.logger.info(f"Received RemoveParticipantFailed event for connection id: {call_connection_id}") + resultInformation = event.data['resultInformation'] + sub_code = resultInformation['subCode'] elif event.type == "Microsoft.Communication.CallDisconnected": app.logger.info(f"Received CallDisconnected event for connection id: {call_connection_id}") return Response(status=200) @@ -259,7 +476,7 @@ def download_recording(): @app.route('/downloadMetadata') def download_metadata(): try: - app.logger.info("Content location : %s", content_location) + app.logger.info("Metadata location : %s", metadata_location) recording_data = call_automation_client.download_recording(metadata_location) with open("Recording_metadata.json", "wb") as binary_file: binary_file.write(recording_data.read()) diff --git a/callautomation-ga3-test-app/template/index.html b/callautomation-ga3-test-app/template/index.html index 85158c2..e3740bb 100644 --- a/callautomation-ga3-test-app/template/index.html +++ b/callautomation-ga3-test-app/template/index.html @@ -11,6 +11,8 @@

GA3 Call automation test app

+ +
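
Note for reviewers: the RecognizeCompleted handling added above fans out on event.data['recognitionType']. The snippet below is a minimal, stand-alone sketch of that routing, assuming the same payload fields the patch itself reads (dtmfResult.tones, choiceResult.label / recognizedPhrase, speechResult.speech); the function name route_recognize_completed is illustrative and not part of the patch.

# Illustrative only: mirrors the RecognizeCompleted routing added in main.py.
# The field names are the ones the patch reads from event.data.
import logging

logger = logging.getLogger(__name__)

def route_recognize_completed(event_data: dict) -> str:
    """Return a short label describing what was recognized."""
    recognition_type = event_data.get('recognitionType')
    if recognition_type == "dtmf":
        tones = event_data['dtmfResult']['tones']
        logger.info("DTMF tones=%s, context=%s", tones, event_data.get('operationContext'))
        return f"dtmf:{tones}"
    if recognition_type == "choices":
        label = event_data['choiceResult']['label']
        phrase = event_data['choiceResult']['recognizedPhrase']
        logger.info("Choice label=%s, phrase=%s", label, phrase)
        return f"choice:{label}"
    if recognition_type == "speech":
        text = event_data['speechResult']['speech']
        logger.info("Speech text=%s", text)
        return f"speech:{text}"
    logger.info("Unhandled recognition result: %s", event_data)
    return "unknown"
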
From f9b3fcbe492cd3842db3a2db0f8783ee6d5babbb Mon Sep 17 00:00:00 2001
From: Durgesh Suryawanshi
Date: Fri, 19 Apr 2024 18:43:16 +0530
Subject: [PATCH 5/6] Added signalling scenarios

---
 callautomation-ga3-test-app/main.py          | 59 ++++++++++++++++---
 .../template/index.html                      |  1 +
 2 files changed, 51 insertions(+), 9 deletions(-)

diff --git a/callautomation-ga3-test-app/main.py b/callautomation-ga3-test-app/main.py
index 6c72a14..6986ee1 100644
--- a/callautomation-ga3-test-app/main.py
+++ b/callautomation-ga3-test-app/main.py
@@ -27,6 +27,8 @@
 COMMUNICATION_USR_ID = ""

+COMMUNICATION_USR_ID_2=""
+
 # Your ACS resource connection string
 ACS_CONNECTION_STRING = ""

@@ -106,7 +108,17 @@ def create_outbound_call():
         source_caller_id_number=source_caller)
     app.logger.info("Created outbound call with connection id: %s", call_connection_properties.call_connection_id)
     return redirect("/")
-
+@app.route('/createGroupCall')
+def create_group_call():
+    target_participant = CommunicationUserIdentifier(COMMUNICATION_USR_ID)
+    target_participant_2 = CommunicationUserIdentifier(COMMUNICATION_USR_ID_2)
+    call_connection_properties = call_automation_client.create_group_call([target_participant,target_participant_2],
+                                                                          callback_url=CALLBACK_EVENTS_URI,
+                                                                          cognitive_services_endpoint=COGNITIVE_SERVICE_ENDPOINT,
+                                                                          )
+    app.logger.info("Created group call with connection id: %s", call_connection_properties.call_connection_id)
+    return redirect("/")
+
 def handle_recognize(playText,callerId,call_connection_id,context="",isDtmf=False):
     choices = [
         RecognitionChoice(
@@ -131,14 +143,14 @@ def handle_recognize(playText,callerId,call_connection_id,context="",isDtmf=Fals
             operation_context=context)
     else:
         play_source = TextSource(text=playText, voice_name="en-US-NancyNeural")
-        recognize_result=call_automation_client.get_call_connection(call_connection_id).start_recognizing_media(
+        recognize_result=call_automation_client.get_call_connection(call_connection_id).start_recognizing_media(
            input_type=RecognizeInputType.CHOICES,
            target_participant=PhoneNumberIdentifier(callerId),
            choices=choices,
            end_silence_timeout=10,
            play_prompt=play_source,
            operation_context=context)
-
+#SPEECH_OR_DTMF,SPEECH,CHOICES
 def handle_play(call_connection_id, text_to_play, context):
     play_source = TextSource(text=text_to_play, voice_name= "en-US-NancyNeural")
     call_automation_client.get_call_connection(call_connection_id).play_media_to_all(play_source,
                                                                                      operation_context=context)
@@ -148,8 +160,8 @@ def handle_play(call_connection_id, text_to_play, context):
    # play_to=PhoneNumberIdentifier(TARGET_PHONE_NUMBER),
    # operation_context=context,
    # loop=True)
-
-    #call_automation_client.get_call_connection(call_connection_id).cancel_all_media_operations()
+    # time.sleep(5)
+    # call_automation_client.get_call_connection(call_connection_id).cancel_all_media_operations()

 def handle_hangup(call_connection_id):
     call_automation_client.get_call_connection(call_connection_id).hang_up(is_for_everyone=True)
@@ -269,8 +281,30 @@ def handle_callback(contextId):
            elif IS_OUTBOUND_CALL:
                app.logger.info("Is Outbound Call:--> %s", IS_OUTBOUND_CALL)
                app.logger.info("Outbound call connected.")
-                #start_continuous_dtmf(call_connection_id=call_connection_id)
+
+                # Cancel add participant test.
+ # app.logger.info("Cancel add participant test initiated.") + # response = call_connection_client.add_participant(target_participant=PhoneNumberIdentifier(ACS_PHONE_NUMBER_2), + # source_caller_id_number=PhoneNumberIdentifier(ACS_PHONE_NUMBER), + # invitation_timeout=10 + # ) + # app.logger.info(f"Invitation Id:--> {response.invitation_id}") + # call_connection_client.cancel_add_participant_operation(response.invitation_id + # # operation_context="cancelAddParticipantContext" + # ) + # Cancel add participant test end + + # Transfer call test + # call_connection_client.transfer_call_to_participant(target_participant=PhoneNumberIdentifier(TARGET_PHONE_NUMBER_2), + # transferee=PhoneNumberIdentifier(TARGET_PHONE_NUMBER), + # operation_context="transferCallContext") + # app.logger.info("Transfer call initiated.") + # Transfer call test end. + + start_continuous_dtmf(call_connection_id=call_connection_id) + #handle_play(call_connection_id,"this is loop test","outboundPlayContext") + #handle_hangup(call_connection_id) else: start_recording(server_call_id) @@ -356,7 +390,7 @@ def handle_callback(contextId): handle_hangup(call_connection_id) elif event.type == "Microsoft.Communication.CallTransferAccepted": app.logger.info(f"Call transfer accepted event received for connection id: {call_connection_id}") - + app.logger.info(f"Operation context:--> {event.data['operationContext']}") elif event.type == "Microsoft.Communication.CallTransferFailed": app.logger.info(f"Call transfer failed event received for connection id: {call_connection_id}") resultInformation = event.data['resultInformation'] @@ -368,10 +402,14 @@ def handle_callback(contextId): if(event.data['operationContext'] == "addPstnUserContext"): app.logger.info("PSTN user added") participants = call_connection_client.list_participants() - app.logger.info("Participants: %s", participants) + app.logger.info("Listing participants in call...") + for page in participants.by_page(): + for participant in page: + app.logger.info("Participant: %s", participant.identifier.raw_id) mute_result = call_connection_client.mute_participant(CommunicationUserIdentifier(COMMUNICATION_USR_ID)) if mute_result: app.logger.info("Participant is muted. 
wating for confirming.....") + time.sleep(5) response = call_connection_client.get_participant(CommunicationUserIdentifier(COMMUNICATION_USR_ID)) if response: app.logger.info(f"Is participant muted: {response.is_muted}") @@ -390,18 +428,21 @@ def handle_callback(contextId): elif event.type == "Microsoft.Communication.CancelAddParticipantSucceeded": app.logger.info(f"Received CancelAddParticipantSucceeded event for connection id: {call_connection_id}") app.logger.info(f"Invitation Id:--> {event.data['invitationId']}") + # app.logger.info(f"Operation context:--> {event.data['operationContext']}") app.logger.info("Cancel add participant test completed.") handle_hangup(call_connection_id) elif event.type == "Microsoft.Communication.CancelAddParticipantFailed": app.logger.info(f"Received CancelAddParticipantFailed event for connection id: {call_connection_id}") resultInformation = event.data['resultInformation'] sub_code = resultInformation['subCode'] + app.logger.info(f"Result Information:--> {resultInformation}") + app.logger.info(f"Sub code:--> {sub_code}") handle_hangup(call_connection_id) elif event.type == "Microsoft.Communication.ContinuousDtmfRecognitionToneReceived": app.logger.info(f"Received ContinuousDtmfRecognitionToneReceived event for connection id: {call_connection_id}") app.logger.info(f"Tone received:-->: {event.data['tone']}") app.logger.info(f"Sequence Id:--> {event.data['sequenceId']}") - # handle_play(call_connection_id,HELLO_PROMPT,"continuousDtmfPlayContext") + #handle_play(call_connection_id,HELLO_PROMPT,"continuousDtmfPlayContext") stop_continuous_dtmf(call_connection_id=call_connection_id) elif event.type == "Microsoft.Communication.ContinuousDtmfRecognitionToneFailed": app.logger.info(f"Received ContinuousDtmfRecognitionToneFailed event for connection id: {call_connection_id}") diff --git a/callautomation-ga3-test-app/template/index.html b/callautomation-ga3-test-app/template/index.html index e3740bb..46a55e0 100644 --- a/callautomation-ga3-test-app/template/index.html +++ b/callautomation-ga3-test-app/template/index.html @@ -13,6 +13,7 @@

GA3 Call automation test app

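
Note for reviewers: PATCH 5/6 pages through list_participants(), mutes the ACS user, waits briefly, and re-reads the participant to confirm is_muted. The diff adds time.sleep(5), but none of the hunks shown touch the imports, so main.py needs import time at the top if it is not already present. The helper below is a minimal sketch of that mute-and-verify step under those assumptions; the function name mute_and_verify and the settle_seconds parameter are illustrative and not part of the patch.

# Illustrative sketch of the mute-and-verify step exercised in this patch.
# Assumes the azure-communication-callautomation package already used in this repo.
import logging
import time

from azure.communication.callautomation import (
    CallConnectionClient,
    CommunicationUserIdentifier,
)

logger = logging.getLogger(__name__)

def mute_and_verify(call_connection_client: CallConnectionClient,
                    user_id: str,
                    settle_seconds: int = 5) -> bool:
    """Mute an ACS user on the call and confirm the flag on re-read."""
    participant = CommunicationUserIdentifier(user_id)
    call_connection_client.mute_participant(participant)
    # Give the service a moment to apply the state change before re-reading it.
    time.sleep(settle_seconds)
    refreshed = call_connection_client.get_participant(participant)
    logger.info("Is participant muted: %s", refreshed.is_muted)
    return bool(refreshed.is_muted)
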
+
From e945dadb29129c3b5d31e1c86465f9bdf569eab7 Mon Sep 17 00:00:00 2001
From: Pilli Vamshi
Date: Mon, 20 May 2024 18:56:36 +0530
Subject: [PATCH 6/6] Tested Beta4 Changes

---
 callautomation-outboundcalling/main.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/callautomation-outboundcalling/main.py b/callautomation-outboundcalling/main.py
index d20a573..042594f 100644
--- a/callautomation-outboundcalling/main.py
+++ b/callautomation-outboundcalling/main.py
@@ -58,19 +58,23 @@ def get_choices():
 def get_media_recognize_choice_options(call_connection_client: CallConnectionClient, text_to_play: str, target_participant:str, choices: any, context: str):
     play_source = TextSource (text= text_to_play, voice_name= SPEECH_TO_TEXT_VOICE)
+    play_sources = [TextSource (text= "Recognize Prompt Test one", voice_name= SPEECH_TO_TEXT_VOICE),TextSource (text= "Recognize Prompt Test two", voice_name= SPEECH_TO_TEXT_VOICE),TextSource (text= text_to_play, voice_name= SPEECH_TO_TEXT_VOICE) ]
     call_connection_client.start_recognizing_media(
         input_type=RecognizeInputType.CHOICES,
         target_participant=target_participant,
         choices=choices,
         play_prompt=play_source,
+        # play_prompts=play_sources,
         interrupt_prompt=False,
         initial_silence_timeout=10,
         operation_context=context
     )

 def handle_play(call_connection_client: CallConnectionClient, text_to_play: str):
-    play_source = TextSource(text=text_to_play, voice_name=SPEECH_TO_TEXT_VOICE)
-    call_connection_client.play_media_to_all(play_source)
+    play_source = TextSource(text=text_to_play, voice_name=SPEECH_TO_TEXT_VOICE)
+    play_source2 = TextSource(text="Interrupt Play Source", voice_name=SPEECH_TO_TEXT_VOICE)
+    call_connection_client.play_media_to_all(play_source=play_source)
+    call_connection_client.play_media_to_all(play_source=play_source2, interrupt_call_media_operation=True )

 # GET endpoint to place phone call
 @app.route('/outboundCall')
@@ -82,6 +86,7 @@ def outbound_call_handler():
         cognitive_services_endpoint=COGNITIVE_SERVICES_ENDPOINT,
         source_caller_id_number=source_caller)
     app.logger.info("Created call with connection id: %s", call_connection_properties.call_connection_id)
+    app.logger.info("answered by: %s", call_connection_properties.answered_by)
     return redirect("/")

@@ -92,7 +97,10 @@ def callback_events_handler():
         # Parsing callback events
         event = CloudEvent.from_dict(event_dict)
         call_connection_id = event.data['callConnectionId']
+        correlation_id = event.data['correlationId']
         app.logger.info("%s event received for call connection id: %s", event.type, call_connection_id)
+        app.logger.info("%s event received for call CORRELATION id: %s", event.type, correlation_id)
+
         call_connection_client = call_automation_client.get_call_connection(call_connection_id)
         target_participant = PhoneNumberIdentifier(TARGET_PHONE_NUMBER)
         if event.type == "Microsoft.Communication.CallConnected":
@@ -141,10 +149,11 @@ def callback_events_handler():
                 text_to_play=textToPlay,
                 target_participant=target_participant,
                 choices=get_choices(),context=RETRY_CONTEXT)
-
+        elif event.type == "Microsoft.Communication.PlayStarted":
+            app.logger.info("Received Play Started event")
         elif event.type in ["Microsoft.Communication.PlayCompleted", "Microsoft.Communication.PlayFailed"]:
             app.logger.info("Terminating call")
-            call_connection_client.hang_up(is_for_everyone=True)
+            # call_connection_client.hang_up(is_for_everyone=True)

     return Response(status=200)
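
Note for reviewers: PATCH 6/6 exercises the Beta4 play behaviour by queueing one prompt and immediately issuing a second play_media_to_all call with interrupt_call_media_operation=True, while PlayStarted is now logged and the hang-up on PlayCompleted/PlayFailed is commented out so the call stays up for inspection. The snippet below condenses that interrupt test using only the SDK calls that appear in the diff; the function name play_with_interrupt and the hard-coded voice are illustrative placeholders, not part of the patch.

# Condensed sketch of the play-interrupt test exercised in PATCH 6/6.
# The voice value is a placeholder; reuse SPEECH_TO_TEXT_VOICE from main.py.
from azure.communication.callautomation import CallConnectionClient, TextSource

SPEECH_TO_TEXT_VOICE = "en-US-NancyNeural"  # placeholder voice for the sketch

def play_with_interrupt(call_connection_client: CallConnectionClient, text_to_play: str) -> None:
    """Queue a prompt, then cut it off with a second prompt that interrupts call media."""
    first = TextSource(text=text_to_play, voice_name=SPEECH_TO_TEXT_VOICE)
    barge_in = TextSource(text="Interrupt Play Source", voice_name=SPEECH_TO_TEXT_VOICE)
    call_connection_client.play_media_to_all(play_source=first)
    # The second call pre-empts the first prompt; watch the PlayStarted,
    # PlayCompleted, and PlayFailed callbacks logged by callback_events_handler
    # to confirm the observed behaviour.
    call_connection_client.play_media_to_all(
        play_source=barge_in,
        interrupt_call_media_operation=True,
    )
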