diff --git a/ListenerApp/ContentView.swift b/ListenerApp/ContentView.swift
index 83f3a8b..1459297 100644
--- a/ListenerApp/ContentView.swift
+++ b/ListenerApp/ContentView.swift
@@ -15,6 +15,12 @@ struct ContentView: View {
     @State private var ipAddress = ""
     @State private var isEditing = false
 
+    let LISTEN_STATE_MSG = 1
+    let LISTEN_TEXT_MSG = 2
+
+    let port = 19026
+    @State private var client: TCPClient?
+
     private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))!
     @State private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
@@ -49,7 +55,18 @@ struct ContentView: View {
     }
 
     func validate(destination : String) {
-        listenEnabled = true
+        client = TCPClient(address: destination, port: Int32(port))
+        guard let client = client else { return }
+        switch client.connect(timeout: 10) {
+        case .success:
+            listenEnabled = true
+        case .failure(let error):
+            client.close()
+            self.client = nil
+            textHeard.append("\n")
+            textHeard.append(String(describing: error))
+            break
+        }
     }
 
     func listen() {
@@ -84,19 +101,50 @@ struct ContentView: View {
             }
         }
 
+        guard let client = client else { return }
+        if (self.listening) {
+            switch (client.send(data: isListening())) {
+            case .success:
+                break
+            case .failure(let error):
+                self.listening = false
+                textHeard.append("\n")
+                textHeard.append(String(describing: error))
+            }
+        }
+
         if (self.listening) {
             do {
                 try startRecording()
             } catch {
-
+                self.listening = false
             }
-        } else {
+        }
+
+        if (!self.listening) {
             audioEngine.stop()
             recognitionRequest?.endAudio()
+            switch (client.send(data: isListening())) {
+            case .success:
+                break
+            case .failure(let error):
+                self.listening = false
+                textHeard.append("\n")
+                textHeard.append(String(describing: error))
+            }
         }
     }
+
+    private func isListening() -> Data {
+        return pack("
 CFBundleShortVersionString
 1.0
 CFBundleVersion
- 46
+ 61
 LSRequiresIPhoneOS
 NSSpeechRecognitionUsageDescription