The following SwiftUI code (based on the above) works with iOS 17+. It uses Apple's speech recognizer (also included below). I updated that code to use the newer AVAudioApplication.requestRecordPermission(completionHandler:) function (to silence the deprecation warning). Include the speech-recognizer file in your project, then add the SpeechView code. Tested only on iPhone, but it should also work on iPads.
import SwiftUI
import MediaPlayer
import AVFoundation
import Speech
/// A minimal UI for starting/stopping live speech transcription via `SpeechRecognizer`.
struct SpeechView: View {
    /// Single source of truth for the idle prompt (was duplicated in three places).
    private static let placeholder = "Press start to record speech..."

    /// Live transcription text; `SpeechRecognizer` writes into this binding continuously.
    @State var textValue: String = SpeechView.placeholder
    @State var speechRecognizer = SpeechRecognizer()
    /// Final transcript captured when the user presses Stop.
    @State private var spokenText: String = ""
    /// Whether a recognition session is currently active.
    @State private var isRecording: Bool = false

    var body: some View {
        VStack(spacing: 50) {
            // Microphone glyph doubles as a recording indicator.
            Image(systemName: isRecording ? "mic.fill" : "mic")
                .imageScale(.large)
                .scaleEffect(2.0)
                .foregroundColor(isRecording ? .accentColor : .primary)
            Text(isRecording ? textValue : SpeechView.placeholder)
            if !isRecording {
                Button {
                    print("starting speech recognition")
                    textValue = ""
                    spokenText = ""
                    isRecording = true
                    speechRecognizer.record(to: $textValue)
                } label: {
                    Text("Start")
                }
            } else {
                Button {
                    print("stopping speech recognition")
                    isRecording = false
                    // Snapshot the transcript before resetting the live text.
                    spokenText = textValue
                    speechRecognizer.stopRecording()
                    textValue = SpeechView.placeholder
                    print(spokenText)
                } label: {
                    Text("Stop")
                }
            }
            // Show the captured transcript once recording has stopped.
            if !isRecording {
                Text(spokenText)
            }
        }
        .padding()
    }
}
// Xcode canvas preview; speech recording itself requires a real device.
#Preview {
SpeechView()
}
//
// SpeechRecognizer.swift
// Scrumdinger
//
// Created by Ahmad Dorra on 2/11/21.
//
import AVFoundation
import Foundation
import Speech
import SwiftUI
/// A helper for transcribing speech to text using AVAudioEngine.
struct SpeechRecognizer {
    /// Reference-type container for the mutable audio/recognition objects so the
    /// enclosing struct can remain a value type while sharing one live session.
    private class SpeechAssist {
        var audioEngine: AVAudioEngine?
        var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
        var recognitionTask: SFSpeechRecognitionTask?
        let speechRecognizer = SFSpeechRecognizer()

        deinit {
            reset()
        }

        /// Tear down any in-flight recognition session and release its objects.
        func reset() {
            // Signal end-of-audio before cancelling so the recognizer has a
            // chance to deliver a final result (was missing previously).
            recognitionRequest?.endAudio()
            recognitionTask?.cancel()
            audioEngine?.stop()
            audioEngine = nil
            recognitionRequest = nil
            recognitionTask = nil
        }
    }

    private let assistant = SpeechAssist()

    /**
        Begin transcribing audio.

        Creates a `SFSpeechRecognitionTask` that transcribes speech to text until you call `stopRecording()`.
        The resulting transcription is continuously written to the provided text binding.

        - Parameters:
            - speech: A binding to a string where the transcription is written.
     */
    func record(to speech: Binding<String>) {
        relay(speech, message: "Requesting access")
        canAccess { authorized in
            guard authorized else {
                relay(speech, message: "Access denied")
                return
            }

            relay(speech, message: "Access granted")

            assistant.audioEngine = AVAudioEngine()
            guard let audioEngine = assistant.audioEngine else {
                // Recoverable setup failure: report and bail out rather than
                // crashing the app with fatalError.
                relay(speech, message: "Unable to create audio engine")
                assistant.reset()
                return
            }
            assistant.recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
            guard let recognitionRequest = assistant.recognitionRequest else {
                relay(speech, message: "Unable to create request")
                assistant.reset()
                return
            }
            // Stream partial transcriptions to the binding as they arrive.
            recognitionRequest.shouldReportPartialResults = true

            do {
                relay(speech, message: "Booting audio subsystem")

                // Configure the shared session for recording; ducking keeps
                // other audio quiet instead of interrupting it outright.
                let audioSession = AVAudioSession.sharedInstance()
                try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
                try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
                let inputNode = audioEngine.inputNode
                relay(speech, message: "Found input node")

                // Tap the microphone input and forward every buffer to the
                // recognition request.
                let recordingFormat = inputNode.outputFormat(forBus: 0)
                inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
                    recognitionRequest.append(buffer)
                }
                relay(speech, message: "Preparing audio engine")
                audioEngine.prepare()
                try audioEngine.start()
                assistant.recognitionTask = assistant.speechRecognizer?.recognitionTask(with: recognitionRequest) { (result, error) in
                    var isFinal = false
                    if let result = result {
                        relay(speech, message: result.bestTranscription.formattedString)
                        isFinal = result.isFinal
                    }

                    if error != nil || isFinal {
                        audioEngine.stop()
                        inputNode.removeTap(onBus: 0)
                        self.assistant.recognitionRequest = nil
                        // Also drop the task reference so the session does not
                        // keep a stale, finished task around (previously leaked).
                        self.assistant.recognitionTask = nil
                    }
                }
            } catch {
                print("Error transcribing audio: " + error.localizedDescription)
                assistant.reset()
            }
        }
    }

    /// Stop transcribing audio.
    func stopRecording() {
        assistant.reset()
    }

    /// Requests speech-recognition authorization, then microphone permission
    /// via the iOS 17+ `AVAudioApplication` API (replaces the deprecated
    /// `AVAudioSession.requestRecordPermission`). Calls `handler` with the
    /// combined result.
    private func canAccess(withHandler handler: @escaping (Bool) -> Void) {
        SFSpeechRecognizer.requestAuthorization { status in
            if status == .authorized {
                AVAudioApplication.requestRecordPermission { authorized in
                    handler(authorized)
                }
            } else {
                handler(false)
            }
        }
    }

    /// Writes `message` into the binding on the main queue, since the binding
    /// drives SwiftUI state and the callbacks above arrive on背ground queues.
    private func relay(_ binding: Binding<String>, message: String) {
        DispatchQueue.main.async {
            binding.wrappedValue = message
        }
    }
}
Hope this helps!