diff --git a/ListenerGS.xcodeproj/project.pbxproj b/ListenerGS.xcodeproj/project.pbxproj
index b48473b..f676bd2 100644
--- a/ListenerGS.xcodeproj/project.pbxproj
+++ b/ListenerGS.xcodeproj/project.pbxproj
@@ -273,7 +273,7 @@
 		9D5155E726A1EF7B0075EBC7 /* Project object */ = {
 			isa = PBXProject;
 			attributes = {
-				LastSwiftUpdateCheck = 1240;
+				LastSwiftUpdateCheck = 1320;
 				LastUpgradeCheck = 1240;
 				TargetAttributes = {
 					9D5155EE26A1EF7B0075EBC7 = {
diff --git a/ListenerGS.xcodeproj/xcuserdata/jrand.xcuserdatad/xcschemes/xcschememanagement.plist b/ListenerGS.xcodeproj/xcuserdata/jrand.xcuserdatad/xcschemes/xcschememanagement.plist
index 0dbda45..1486bb7 100644
--- a/ListenerGS.xcodeproj/xcuserdata/jrand.xcuserdatad/xcschemes/xcschememanagement.plist
+++ b/ListenerGS.xcodeproj/xcuserdata/jrand.xcuserdatad/xcschemes/xcschememanagement.plist
@@ -15,5 +15,23 @@
 			<integer>0</integer>
 		</dict>
 	</dict>
+	<key>SuppressBuildableAutocreation</key>
+	<dict>
+		<key>9D62FC5F27C494D700AEE01F</key>
+		<dict>
+			<key>primary</key>
+			<true/>
+		</dict>
+		<key>9D62FC6F27C494D900AEE01F</key>
+		<dict>
+			<key>primary</key>
+			<true/>
+		</dict>
+		<key>9D62FC7927C494D900AEE01F</key>
+		<dict>
+			<key>primary</key>
+			<true/>
+		</dict>
+	</dict>
 </dict>
 </plist>
diff --git a/ListenerGS/Info.plist b/ListenerGS/Info.plist
index 77bbe6c..0ac7358 100644
--- a/ListenerGS/Info.plist
+++ b/ListenerGS/Info.plist
@@ -19,7 +19,7 @@
 	<key>CFBundleShortVersionString</key>
 	<string>1.0</string>
 	<key>CFBundleVersion</key>
-	<string>378</string>
+	<string>390</string>
 	<key>LSApplicationCategoryType</key>
 	<string>public.app-category.utilities</string>
 	<key>LSRequiresIPhoneOS</key>
diff --git a/ListenerGS/SpeechForwarder.swift b/ListenerGS/SpeechForwarder.swift
index 1ec400a..d021b5f 100644
--- a/ListenerGS/SpeechForwarder.swift
+++ b/ListenerGS/SpeechForwarder.swift
@@ -113,6 +113,9 @@ class SpeechForwarder : ObservableObject {
         logger.debug("Stopped listening")
         audioEngine.stop()
         recognitionRequest?.endAudio()
+        recognitionTask?.cancel()
+        audioEngine.inputNode.removeTap(onBus: 0);
+        audioEngine.inputNode.reset()
         switch (client.send(data: isListening())) {
         case .success:
             break
@@ -177,6 +180,12 @@ class SpeechForwarder : ObservableObject {
         try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
         try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
         let inputNode = audioEngine.inputNode
+
+        // Configure the microphone input.
+        let recordingFormat = inputNode.outputFormat(forBus: 0)
+        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
+            self.recognitionRequest?.append(buffer)
+        }
 
         // Create and configure the speech recognition request.
         recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
@@ -202,6 +211,10 @@ class SpeechForwarder : ObservableObject {
                 print("Text \(result.bestTranscription.formattedString)")
             }
 
+            if error != nil {
+                self.logger.error("Error from recognizer: \(String(describing: error))")
+            }
+
             if error != nil || isFinal {
                 // Stop recognizing speech if there is a problem.
                 self.audioEngine.stop()
@@ -220,12 +233,6 @@ class SpeechForwarder : ObservableObject {
                 }
             }
         }
-
-        // Configure the microphone input.
-        let recordingFormat = inputNode.outputFormat(forBus: 0)
-        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
-            self.recognitionRequest?.append(buffer)
-        }
 
         audioEngine.prepare()
         try audioEngine.start()