Skip to content

Commit 1e5240d

Browse files
Merge branch '983303-kanban-dev' of https://github.com/syncfusion-content/blazor-docs into 983303-kanban-dev
2 parents 0bf71eb + a12900e commit 1e5240d

File tree

95 files changed

+3717
-3109
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

95 files changed

+3717
-3109
lines changed

blazor-toc.html

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -690,6 +690,12 @@
690690
<li><a href="/blazor/ai-assistview/custom-view">Custom views</a></li>
691691
<li><a href="/blazor/ai-assistview/file-attachments">File attachments</a></li>
692692
<li><a href="/blazor/ai-assistview/templates">Templates</a></li>
693+
<li>Speech
694+
<ul>
695+
<li><a href="/blazor/ai-assistview/speech/speech-to-text">Speech to Text</a></li>
696+
<li><a href="/blazor/ai-assistview/speech/text-to-speech">Text to Speech</a></li>
697+
</ul>
698+
</li>
693699
<li><a href="/blazor/ai-assistview/appearance">Appearance</a></li>
694700
<li><a href="/blazor/ai-assistview/accessibility">Accessibility</a></li>
695701
<li><a href="/blazor/ai-assistview/methods">Methods</a></li>
13.6 KB
Loading
25.1 KB
Loading
Lines changed: 273 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,273 @@
1+
---
2+
layout: post
3+
title: Speech-to-Text with Blazor AI AssistView Component | Syncfusion
4+
description: Checkout and learn about Speech-to-Text configuration with Blazor AI AssistView component in Blazor Server App and Blazor WebAssembly App.
5+
platform: Blazor
6+
control: AI AssistView
7+
documentation: ug
8+
---
9+
10+
# Speech-to-Text in Blazor AI AssistView
11+
12+
The Syncfusion Blazor AI AssistView component supports `Speech-to-Text` functionality through the browser's [Web Speech API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Speech_API), enabling conversion of spoken words into text using the device's microphone.
13+
14+
## Prerequisites
15+
16+
Before integrating `Speech-to-Text`, ensure the following:
17+
18+
1. The Syncfusion AI AssistView component is properly set up in your Blazor application.
19+
- [Blazor Getting Started Guide](../getting-started)
20+
21+
2. The AI AssistView component is integrated with [Azure OpenAI](https://microsoft.github.io/PartnerResources/skilling/ai-ml-academy/resources/openai).
22+
- [Integration of Azure OpenAI With Blazor AI AssistView component](../ai-integrations/openai-integration.md)
23+
24+
## Configure Speech-to-Text
25+
26+
To enable Speech-to-Text functionality, modify the `Home.razor` file to incorporate the Web Speech API. The [SpeechToText](https://blazor.syncfusion.com/documentation/speech-to-text/getting-started-web-app) component listens for microphone input, transcribes spoken words, and updates the AI AssistView's editable footer with the transcribed text. The transcribed text is then sent as a prompt to the Azure OpenAI service via the AI AssistView component.
27+
28+
The `speechtotext.js` file handles operations related to the content of the editable footer, such as checking for meaningful input, clearing existing text, and updating the content with the transcribed value. Meanwhile, the `speechtotext.css` file styles the AI AssistView layout and ensures the component remains responsive across different screen sizes and devices.
29+
30+
{% tabs %}
31+
{% highlight c# tabtitle="razor" %}
32+
33+
@using Syncfusion.Blazor.InteractiveChat
34+
@using AssistView_OpenAI.Components.Services
35+
@using Syncfusion.Blazor.Navigations
36+
@using Syncfusion.Blazor.Inputs
37+
@using Syncfusion.Blazor.Buttons
38+
@inject AzureOpenAIService OpenAIService
39+
@inject IJSRuntime JSRuntime
40+
41+
<div class="integration-speechtotext-section">
42+
<SfAIAssistView @ref="assistView" PromptRequested="@PromptRequest">
43+
<AssistViews>
44+
<AssistView>
45+
<FooterTemplate>
46+
<div class="e-footer-wrapper">
47+
<div id="assistview-footer" class="content-editor" contenteditable="true" placeholder="Click to speak or start typing..." @oninput="@UpdateContent" @onkeydown="@OnKeyDown" @ref="@EditableDiv">@AssistViewFooterValue</div>
48+
<div class="option-container">
49+
<SfSpeechToText ID="speechToText" TranscriptChanging="@OnTranscriptChange" SpeechRecognitionStopped="@HandleStopRecognition"
50+
CssClass="@($"e-flat {SpeechToTextCssClass}")"></SfSpeechToText>
51+
<SfButton ID="assistview-sendButton" IconCss="e-assist-send e-icons" CssClass="@ButtonCssClass" @onclick="SendButtonClicked"></SfButton>
52+
</div>
53+
</div>
54+
</FooterTemplate>
55+
<BannerTemplate>
56+
<div class="banner-content">
57+
<div class="e-icons e-listen-icon"></div>
58+
            <i>Click the mic button below to convert your voice to text.</i>
59+
</div>
60+
</BannerTemplate>
61+
</AssistView>
62+
</AssistViews>
63+
<AssistViewToolbar ItemClicked="ToolbarItemClicked">
64+
<AssistViewToolbarItem Type="ItemType.Spacer"></AssistViewToolbarItem>
65+
<AssistViewToolbarItem IconCss="e-icons e-refresh"></AssistViewToolbarItem>
66+
</AssistViewToolbar>
67+
<PromptToolbar ItemClicked="PromptToolbarItemClicked"></PromptToolbar>
68+
</SfAIAssistView>
69+
</div>
70+
71+
@code {
72+
private SfAIAssistView assistView;
73+
private string finalResponse { get; set; }
74+
private string AssistViewFooterValue = String.Empty;
75+
private ElementReference EditableDiv;
76+
private string FooterContent = String.Empty;
77+
private string SpeechToTextCssClass = "visible";
78+
private string ButtonCssClass = String.Empty;
79+
80+
    private async Task OnTranscriptChange(TranscriptChangeEventArgs args)
81+
{
82+
AssistViewFooterValue = args.Transcript;
83+
await JSRuntime.InvokeVoidAsync("updateContentEditableDiv", EditableDiv, AssistViewFooterValue);
84+
await InvokeAsync(StateHasChanged);
85+
}
86+
private async Task UpdateContent()
87+
{
88+
FooterContent = await JSRuntime.InvokeAsync<String>("isFooterContainsValue", EditableDiv);
89+
ToggleVisibility();
90+
}
91+
private async Task HandleStopRecognition()
92+
{
93+
FooterContent = AssistViewFooterValue;
94+
ToggleVisibility();
95+
await InvokeAsync(StateHasChanged);
96+
}
97+
private void ToggleVisibility()
98+
{
99+
ButtonCssClass = string.IsNullOrWhiteSpace(FooterContent) ? "" : "visible";
100+
SpeechToTextCssClass = string.IsNullOrWhiteSpace(FooterContent) ? "visible" : "";
101+
}
102+
private async Task PromptRequest(AssistViewPromptRequestedEventArgs args)
103+
{
104+
AssistViewFooterValue = String.Empty;
105+
await JSRuntime.InvokeVoidAsync("emptyFooterValue", EditableDiv);
106+
await UpdateContent();
107+
var lastIdx = assistView.Prompts.Count - 1;
108+
assistView.Prompts[lastIdx].Response = string.Empty;
109+
finalResponse = string.Empty;
110+
try
111+
{
112+
await foreach (var chunk in OpenAIService.GetChatResponseStreamAsync(args.Prompt))
113+
{
114+
await UpdateResponse(args, chunk);
115+
}
116+
117+
args.Response = finalResponse;
118+
}
119+
catch (Exception ex)
120+
{
121+
args.Response = $"Error: {ex.Message}";
122+
}
123+
ToggleVisibility();
124+
}
125+
126+
private async Task UpdateResponse(AssistViewPromptRequestedEventArgs args, string response)
127+
{
128+
var lastIdx = assistView.Prompts.Count - 1;
129+
await Task.Delay(30); // Small delay for UI updates
130+
assistView.Prompts[lastIdx].Response += response.Replace("\n", "<br>");
131+
finalResponse = assistView.Prompts[lastIdx].Response;
132+
StateHasChanged();
133+
}
134+
135+
private async Task SendButtonClicked()
136+
{
137+
await assistView.ExecutePromptAsync(FooterContent);
138+
}
139+
private void ToolbarItemClicked(AssistViewToolbarItemClickedEventArgs args)
140+
{
141+
if (args.Item.IconCss == "e-icons e-refresh")
142+
{
143+
assistView.Prompts.Clear();
144+
}
145+
}
146+
private async Task OnKeyDown(KeyboardEventArgs e)
147+
{
148+
if (e.Key == "Enter" && !e.ShiftKey)
149+
{
150+
await SendButtonClicked();
151+
}
152+
}
153+
    private async Task PromptToolbarItemClicked(AssistViewToolbarItemClickedEventArgs args)
154+
{
155+
if (args.Item.IconCss == "e-icons e-assist-edit") {
156+
AssistViewFooterValue = assistView.Prompts[args.DataIndex].Prompt;
157+
await JSRuntime.InvokeVoidAsync("updateContentEditableDiv", EditableDiv, AssistViewFooterValue);
158+
await UpdateContent();
159+
}
160+
}
161+
}
162+
163+
{% endhighlight %}
164+
165+
{% highlight c# tabtitle="speechtotext.js" %}
166+
167+
// Checks if the content editable element contains meaningful text and cleans up.
168+
function isFooterContainsValue(elementref) {
169+
    if (elementref.innerText.trim() === '') {
170+
if ((elementref.innerHTML === '<br>' || elementref.innerHTML.trim() === '')) {
171+
elementref.innerHTML = '';
172+
}
173+
}
174+
return elementref.innerText || "";
175+
}
176+
// Clears the text content of a content editable element.
177+
function emptyFooterValue(elementref) {
178+
if (elementref) {
179+
elementref.innerText = "";
180+
}
181+
}
182+
// Updates the text content of a content editable element with a specified value.
183+
function updateContentEditableDiv(element, value) {
184+
if (element) {
185+
element.innerText = value;
186+
}
187+
}
188+
189+
{% endhighlight %}
190+
191+
{% highlight c# tabtitle="speechtotext.css" %}
192+
193+
.integration-speechtotext-section {
194+
height: 350px;
195+
width: 650px;
196+
margin: 0 auto;
197+
}
198+
199+
.integration-speechtotext-section .banner-content .e-listen-icon:before {
200+
font-size: 25px;
201+
}
202+
203+
.integration-speechtotext-section .e-view-container {
204+
margin: auto;
205+
}
206+
207+
.integration-speechtotext-section .banner-content {
208+
display: flex;
209+
flex-direction: column;
210+
gap: 10px;
211+
text-align: center;
212+
}
213+
214+
.integration-speechtotext-section #assistview-sendButton {
215+
width: 40px;
216+
height: 40px;
217+
font-size: 20px;
218+
border: none;
219+
background: none;
220+
cursor: pointer;
221+
}
222+
223+
.integration-speechtotext-section #speechToText.visible,
224+
.integration-speechtotext-section #assistview-sendButton.visible {
225+
display: inline-block;
226+
}
227+
228+
.integration-speechtotext-section #speechToText,
229+
.integration-speechtotext-section #assistview-sendButton {
230+
display: none;
231+
}
232+
233+
@media only screen and (max-width: 750px) {
234+
.integration-speechtotext-section {
235+
width: 100%;
236+
}
237+
}
238+
239+
.integration-speechtotext-section .e-footer-wrapper {
240+
display: flex;
241+
border: 1px solid #c1c1c1;
242+
padding: 5px 5px 5px 10px;
243+
margin: 5px 5px 0 5px;
244+
border-radius: 30px;
245+
}
246+
247+
.integration-speechtotext-section .content-editor {
248+
width: 100%;
249+
overflow-y: auto;
250+
font-size: 14px;
251+
min-height: 25px;
252+
max-height: 200px;
253+
padding: 10px;
254+
}
255+
256+
.integration-speechtotext-section .content-editor[contentEditable=true]:empty:before {
257+
content: attr(placeholder);
258+
color: #6b7280;
259+
}
260+
261+
.integration-speechtotext-section .option-container {
262+
align-self: flex-end;
263+
}
264+
265+
{% endhighlight %}
266+
267+
{% endtabs %}
268+
269+
![Integrating Speech-to-Text with AI AssistView](../images/assist-stt.png)
270+
271+
## See Also
272+
273+
* [Text-to-Speech](./text-to-speech.md)

0 commit comments

Comments
 (0)