
UWP: Using continuous dictation


Everything works fine. I just want to know how to keep my dictation session active until I press the button again, because right now, if I stop speaking for about 10 seconds and then start speaking again, it overwrites what I already have. I want the session to stop only when I press the button again.

  case "0":
                isListening = true;
                var dictationConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "dictation");
                speechRecognizer.Constraints.Add(dictationConstraint);
                SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();
                speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
                speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
                speechRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;
                if (isListening) {
                    await speechRecognizer.ContinuousRecognitionSession.StartAsync();
                    textToSpeech.Background = (SolidColorBrush)Resources[ON];
                }
                break;
            case "1":
                if (richEbitBox.Document.Selection.CharacterFormat.Bold == FormatEffect.On) {
                    richEbitBox.Document.Selection.CharacterFormat.Bold = FormatEffect.Off;
                    FormatBoltText.Background = (SolidColorBrush)Resources[OFF];
                } else {
                    richEbitBox.Document.Selection.CharacterFormat.Bold = FormatEffect.On;
                    FormatBoltText.Background = (SolidColorBrush)Resources[ON];
                }
                break;
            case "2":
                if (richEbitBox.Document.Selection.CharacterFormat.Italic == FormatEffect.On) {
                    richEbitBox.Document.Selection.CharacterFormat.Italic = FormatEffect.Off;
                    formatItalicText.Background = (SolidColorBrush)Resources[OFF];
                } else {
                    richEbitBox.Document.Selection.CharacterFormat.Italic = FormatEffect.On;
                    formatItalicText.Background = (SolidColorBrush)Resources[ON];
                }
                break;
            case "3":
                if (richEbitBox.Document.Selection.CharacterFormat.Underline == UnderlineType.Single) {
                    richEbitBox.Document.Selection.CharacterFormat.Underline = UnderlineType.None;
                    formatUnderlineText.Background = (SolidColorBrush)Resources[OFF];
                } else {
                    richEbitBox.Document.Selection.CharacterFormat.Underline = UnderlineType.Single;
                    formatUnderlineText.Background = (SolidColorBrush)Resources[ON];
                }
                break;
            case "4":
                if (Ink_cnvas.Visibility == Visibility.Collapsed) {
                    formatDraw.Background = (SolidColorBrush)Resources[ON];
                    Ink_cnvas.Visibility = Visibility.Visible;
                    richEbitBox.Visibility = Visibility.Collapsed;
                } else if (Ink_cnvas.Visibility == Visibility.Visible) {
                    Ink_cnvas.Visibility = Visibility.Collapsed;
                    formatDraw.Background = (SolidColorBrush)Resources[OFF];
                    richEbitBox.Visibility = Visibility.Visible;
                }
                break;
            case "5":
                richEbitBox.Document.GetText(TextGetOptions.AdjustCrlf, out string value);
                speak(value);
                break;
            default:
                break;
        }
    }

    private async void SpeechRecognizer_HypothesisGenerated(SpeechRecognizer sender, SpeechRecognitionHypothesisGeneratedEventArgs args) {
        string hypothesis = args.Hypothesis.Text;
        string textboxContent = dictateBuilder.ToString() + " " + hypothesis + " ...";

        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            richEbitBox.Document.SetText(TextSetOptions.None, textboxContent);
        });
    }

    private async void ContinuousRecognitionSession_ResultGenerated(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args) {
        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
            richEbitBox.Document.SetText(TextSetOptions.None, args.Result.Text);
        });
    }

    private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args) {
    }
Continuous dictation

From a quick look, you are missing some code. If you want to use dictation, add the following:

var dictationConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "dictation");
speechRecognizer.Constraints.Add(dictationConstraint);
There is a complete sample you can use; please check the SpeechRecognitionAndSynthesis code sample. I tested it and it works well, so please integrate that scenario into your app.
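One additional note, as an assumption about your scenario rather than something confirmed above: the ten-second cutoff you describe matches the continuous session's silence timeout. SpeechContinuousRecognitionSession exposes an AutoStopSilenceTimeout property, so a minimal sketch would lengthen it before calling StartAsync; the five-minute value below is arbitrary.

// Sketch: lengthen the silence timeout so a pause in speech does not end the session.
// Assumes speechRecognizer already has its constraints compiled via CompileConstraintsAsync().
speechRecognizer.ContinuousRecognitionSession.AutoStopSilenceTimeout = TimeSpan.FromMinutes(5);
await speechRecognizer.ContinuousRecognitionSession.StartAsync();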

Update

For example, I start typing "my blue duck has a ponytail", then I tap the space bar and dictate another sentence. I also want dictation to stop when the button is pressed.

This is easy to achieve based on the official code sample: append the text box's current text to dictatedTextBuilder before StartAsync, and clear dictatedTextBuilder after StopAsync, as shown below. Note that the dictationTextBox in the official sample is read-only; change it to IsReadOnly="False".


I forgot to mention that when I press the button again I get a strange exception: "The text associated with this error code could not be found". By the way, the dictation feature is handled in case "0". Thank you for your help, sir. I may need your help once more with the button and session handling; I have edited the code. I would also like to know whether there is a way to keep what the user has typed: every time I try to concatenate the RichEditBox text I get the same prompt.

Could you explain what you mean by "the same prompt"?

Sorry, that was a typo. My goal is, for example, to type whatever I want and then speak, or the other way around, and have the app concatenate the typed text with the dictated text. For example, I start typing "my blue duck has a ponytail", then I tap the space bar and dictate another sentence. I also want dictation to stop when the button is pressed. I am completely stuck.
public async void ContinuousRecognize_Click(object sender, RoutedEventArgs e)
{
    btnContinuousRecognize.IsEnabled = false;
    if (isListening == false)
    {
        // The recognizer can only start listening in a continuous fashion if the recognizer is currently idle.
        // This prevents an exception from occurring.
        if (speechRecognizer.State == SpeechRecognizerState.Idle)
        {
            DictationButtonText.Text = " Stop Dictation";
            cbLanguageSelection.IsEnabled = false;
            hlOpenPrivacySettings.Visibility = Visibility.Collapsed;
            discardedTextBlock.Visibility = Windows.UI.Xaml.Visibility.Collapsed;

            try
            {
                isListening = true;
                dictatedTextBuilder.Append(dictationTextBox.Text);
                await speechRecognizer.ContinuousRecognitionSession.StartAsync();
            }
            catch (Exception ex)
            {
                if ((uint)ex.HResult == HResultPrivacyStatementDeclined)
                {
                    // Show a UI link to the privacy settings.
                    hlOpenPrivacySettings.Visibility = Visibility.Visible;
                }
                else
                {
                    var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
                    await messageDialog.ShowAsync();
                }

                isListening = false;
                DictationButtonText.Text = " Dictate";
                cbLanguageSelection.IsEnabled = true;

            }
        }
    }
    else
    {
        isListening = false;
        DictationButtonText.Text = " Dictate";
        cbLanguageSelection.IsEnabled = true;

        if (speechRecognizer.State != SpeechRecognizerState.Idle)
        {
            // Cancelling recognition prevents any currently recognized speech from
            // generating a ResultGenerated event. StopAsync() will allow the final session to 
            // complete.
            try
            {
                await speechRecognizer.ContinuousRecognitionSession.StopAsync();

                // Ensure we don't leave any hypothesis text behind
                dictationTextBox.Text = dictatedTextBuilder.ToString();
                dictatedTextBuilder.Clear();
            }
            catch (Exception exception)
            {
                var messageDialog = new Windows.UI.Popups.MessageDialog(exception.Message, "Exception");
                await messageDialog.ShowAsync();
            }
        }
    }
    btnContinuousRecognize.IsEnabled = true;

}
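To address the follow-up about merging typed and dictated text, and about the session ending on its own: below is a minimal sketch of the two session handlers, assuming the same speechRecognizer, dictatedTextBuilder, dictationTextBox, isListening and dispatcher members as the button handler above. Appending in ResultGenerated keeps earlier text instead of replacing it; restarting in Completed on a TimeoutExceeded status is my own addition, not part of the official sample.

private async void ContinuousRecognitionSession_ResultGenerated(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args)
{
    // Append the new phrase instead of overwriting whatever was typed or dictated before.
    dictatedTextBuilder.Append(args.Result.Text + " ");

    await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        dictationTextBox.Text = dictatedTextBuilder.ToString();
    });
}

private async void ContinuousRecognitionSession_Completed(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionCompletedEventArgs args)
{
    // If the service stopped the session because of a long silence, restart it
    // so dictation only ends when the user presses the button again.
    if (args.Status == SpeechRecognitionResultStatus.TimeoutExceeded && isListening)
    {
        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
        {
            dictationTextBox.Text = dictatedTextBuilder.ToString();
            await speechRecognizer.ContinuousRecognitionSession.StartAsync();
        });
    }
}

If you adopt this, also remove the SetText call in your current ResultGenerated handler, since replacing the document with args.Result.Text alone is what overwrites the earlier content.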