删除无关代码

This commit is contained in:
2026-02-11 18:58:30 +08:00
parent 4168da618e
commit a83fd918a8
23 changed files with 6 additions and 3669 deletions

View File

@@ -57,16 +57,8 @@
046086752F191CC700757C95 /* AI技术分析.txt in Resources */ = {isa = PBXBuildFile; fileRef = 046086742F191CC700757C95 /* AI技术分析.txt */; };
0460869A2F19238500757C95 /* KBAiWaveformView.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086992F19238500757C95 /* KBAiWaveformView.m */; };
0460869C2F19238500757C95 /* KBAiRecordButton.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086972F19238500757C95 /* KBAiRecordButton.m */; };
046086B12F19239B00757C95 /* SubtitleSync.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086AC2F19239B00757C95 /* SubtitleSync.m */; };
046086B22F19239B00757C95 /* TTSServiceClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086B02F19239B00757C95 /* TTSServiceClient.m */; };
046086B32F19239B00757C95 /* AudioSessionManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A22F19239B00757C95 /* AudioSessionManager.m */; };
046086B42F19239B00757C95 /* LLMStreamClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A82F19239B00757C95 /* LLMStreamClient.m */; };
046086B52F19239B00757C95 /* Segmenter.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086AA2F19239B00757C95 /* Segmenter.m */; };
046086B62F19239B00757C95 /* TTSPlaybackPipeline.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086AE2F19239B00757C95 /* TTSPlaybackPipeline.m */; };
046086B72F19239B00757C95 /* ConversationOrchestrator.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A62F19239B00757C95 /* ConversationOrchestrator.m */; };
046086B82F19239B00757C95 /* ASRStreamClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 0460869E2F19239B00757C95 /* ASRStreamClient.m */; };
046086B92F19239B00757C95 /* AudioCaptureManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A02F19239B00757C95 /* AudioCaptureManager.m */; };
046086BA2F19239B00757C95 /* AudioStreamPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A42F19239B00757C95 /* AudioStreamPlayer.m */; };
046086BD2F1A039F00757C95 /* KBAICommentView.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086BC2F1A039F00757C95 /* KBAICommentView.m */; };
046086CB2F1A092500757C95 /* comments_mock.json in Resources */ = {isa = PBXBuildFile; fileRef = 046086C62F1A092500757C95 /* comments_mock.json */; };
046086CC2F1A092500757C95 /* KBAIReplyModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086CA2F1A092500757C95 /* KBAIReplyModel.m */; };
@@ -148,7 +140,6 @@
048FFD302F29F3C3005D62AE /* KBAIMessageZanVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD2F2F29F3C3005D62AE /* KBAIMessageZanVC.m */; };
048FFD332F29F3D2005D62AE /* KBAIMessageChatingVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD322F29F3D2005D62AE /* KBAIMessageChatingVC.m */; };
048FFD342F29F400005D62AE /* KBAIMessageListVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD362F29F400005D62AE /* KBAIMessageListVC.m */; };
048FFD362F29F88E005D62AE /* AIMessageVM.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD352F29F88E005D62AE /* AIMessageVM.m */; };
048FFD372F29F410005D62AE /* KBAIMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD392F29F410005D62AE /* KBAIMessageCell.m */; };
048FFD392F2A24C5005D62AE /* KBAIChatMessageCacheManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD382F2A24C5005D62AE /* KBAIChatMessageCacheManager.m */; };
048FFD3C2F29F500005D62AE /* KBLikedCompanionModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD3B2F29F500005D62AE /* KBLikedCompanionModel.m */; };
@@ -209,6 +200,7 @@
04B5A1A22EEFA12300AAAAAA /* KBPayProductModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 04B5A1A12EEFA12300AAAAAA /* KBPayProductModel.m */; };
04BBF89D2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 04BBF89A2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m */; };
04BBF89E2F3ACD8800B1FBB2 /* KBTestVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 04BBF89C2F3ACD8800B1FBB2 /* KBTestVC.m */; };
04BBF9002F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 04BBF8FF2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m */; };
04C6EABA2EAF86530089C901 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 04C6EAAE2EAF86530089C901 /* Assets.xcassets */; };
04C6EABC2EAF86530089C901 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 04C6EAB12EAF86530089C901 /* LaunchScreen.storyboard */; };
04C6EABD2EAF86530089C901 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 04C6EAB42EAF86530089C901 /* Main.storyboard */; };
@@ -222,10 +214,7 @@
04D1F6B32EDFF10A00B12345 /* KBSkinInstallBridge.m in Sources */ = {isa = PBXBuildFile; fileRef = 04D1F6B12EDFF10A00B12345 /* KBSkinInstallBridge.m */; };
04E0383E2F1A7C30002CA5A0 /* KBCustomTabBar.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E0383D2F1A7C30002CA5A0 /* KBCustomTabBar.m */; };
04E038D82F20BFFB002CA5A0 /* websocket-api.md in Resources */ = {isa = PBXBuildFile; fileRef = 04E038D72F20BFFB002CA5A0 /* websocket-api.md */; };
04E038DD2F20C420002CA5A0 /* VoiceChatStreamingManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038DA2F20C420002CA5A0 /* VoiceChatStreamingManager.m */; };
04E038DE2F20C420002CA5A0 /* VoiceChatWebSocketClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038DC2F20C420002CA5A0 /* VoiceChatWebSocketClient.m */; };
04E038E32F20E500002CA5A0 /* deepgramAPI.md in Resources */ = {isa = PBXBuildFile; fileRef = 04E038E22F20E500002CA5A0 /* deepgramAPI.md */; };
04E038E82F20E877002CA5A0 /* DeepgramWebSocketClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038E72F20E877002CA5A0 /* DeepgramWebSocketClient.m */; };
04E038E92F20E877002CA5A0 /* DeepgramStreamingManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038E52F20E877002CA5A0 /* DeepgramStreamingManager.m */; };
04E038EF2F21F0EC002CA5A0 /* AiVM.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038EE2F21F0EC002CA5A0 /* AiVM.m */; };
04E0394B2F236E75002CA5A0 /* KBChatUserMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E0394A2F236E75002CA5A0 /* KBChatUserMessageCell.m */; };
@@ -410,26 +399,10 @@
046086972F19238500757C95 /* KBAiRecordButton.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAiRecordButton.m; sourceTree = "<group>"; };
046086982F19238500757C95 /* KBAiWaveformView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAiWaveformView.h; sourceTree = "<group>"; };
046086992F19238500757C95 /* KBAiWaveformView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAiWaveformView.m; sourceTree = "<group>"; };
0460869D2F19239B00757C95 /* ASRStreamClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ASRStreamClient.h; sourceTree = "<group>"; };
0460869E2F19239B00757C95 /* ASRStreamClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ASRStreamClient.m; sourceTree = "<group>"; };
0460869F2F19239B00757C95 /* AudioCaptureManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioCaptureManager.h; sourceTree = "<group>"; };
046086A02F19239B00757C95 /* AudioCaptureManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioCaptureManager.m; sourceTree = "<group>"; };
046086A12F19239B00757C95 /* AudioSessionManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioSessionManager.h; sourceTree = "<group>"; };
046086A22F19239B00757C95 /* AudioSessionManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioSessionManager.m; sourceTree = "<group>"; };
046086A32F19239B00757C95 /* AudioStreamPlayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioStreamPlayer.h; sourceTree = "<group>"; };
046086A42F19239B00757C95 /* AudioStreamPlayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioStreamPlayer.m; sourceTree = "<group>"; };
046086A52F19239B00757C95 /* ConversationOrchestrator.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ConversationOrchestrator.h; sourceTree = "<group>"; };
046086A62F19239B00757C95 /* ConversationOrchestrator.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ConversationOrchestrator.m; sourceTree = "<group>"; };
046086A72F19239B00757C95 /* LLMStreamClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = LLMStreamClient.h; sourceTree = "<group>"; };
046086A82F19239B00757C95 /* LLMStreamClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = LLMStreamClient.m; sourceTree = "<group>"; };
046086A92F19239B00757C95 /* Segmenter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Segmenter.h; sourceTree = "<group>"; };
046086AA2F19239B00757C95 /* Segmenter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Segmenter.m; sourceTree = "<group>"; };
046086AB2F19239B00757C95 /* SubtitleSync.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SubtitleSync.h; sourceTree = "<group>"; };
046086AC2F19239B00757C95 /* SubtitleSync.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SubtitleSync.m; sourceTree = "<group>"; };
046086AD2F19239B00757C95 /* TTSPlaybackPipeline.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TTSPlaybackPipeline.h; sourceTree = "<group>"; };
046086AE2F19239B00757C95 /* TTSPlaybackPipeline.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TTSPlaybackPipeline.m; sourceTree = "<group>"; };
046086AF2F19239B00757C95 /* TTSServiceClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TTSServiceClient.h; sourceTree = "<group>"; };
046086B02F19239B00757C95 /* TTSServiceClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TTSServiceClient.m; sourceTree = "<group>"; };
046086BB2F1A039F00757C95 /* KBAICommentView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAICommentView.h; sourceTree = "<group>"; };
046086BC2F1A039F00757C95 /* KBAICommentView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAICommentView.m; sourceTree = "<group>"; };
046086C62F1A092500757C95 /* comments_mock.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = comments_mock.json; sourceTree = "<group>"; };
@@ -579,9 +552,7 @@
048FFD2F2F29F3C3005D62AE /* KBAIMessageZanVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAIMessageZanVC.m; sourceTree = "<group>"; };
048FFD312F29F3D2005D62AE /* KBAIMessageChatingVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIMessageChatingVC.h; sourceTree = "<group>"; };
048FFD322F29F3D2005D62AE /* KBAIMessageChatingVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAIMessageChatingVC.m; sourceTree = "<group>"; };
048FFD342F29F88E005D62AE /* AIMessageVM.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AIMessageVM.h; sourceTree = "<group>"; };
048FFD352F29F400005D62AE /* KBAIMessageListVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIMessageListVC.h; sourceTree = "<group>"; };
048FFD352F29F88E005D62AE /* AIMessageVM.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AIMessageVM.m; sourceTree = "<group>"; };
048FFD362F29F400005D62AE /* KBAIMessageListVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAIMessageListVC.m; sourceTree = "<group>"; };
048FFD372F2A24C5005D62AE /* KBAIChatMessageCacheManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIChatMessageCacheManager.h; sourceTree = "<group>"; };
048FFD382F29F410005D62AE /* KBAIMessageCell.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIMessageCell.h; sourceTree = "<group>"; };
@@ -688,6 +659,8 @@
04BBF89A2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBKeyboardStressTestVC.m; sourceTree = "<group>"; };
04BBF89B2F3ACD8800B1FBB2 /* KBTestVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBTestVC.h; sourceTree = "<group>"; };
04BBF89C2F3ACD8800B1FBB2 /* KBTestVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBTestVC.m; sourceTree = "<group>"; };
04BBF8FE2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DeepgramWebSocketClient.h; sourceTree = "<group>"; };
04BBF8FF2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DeepgramWebSocketClient.m; sourceTree = "<group>"; };
04C6EAAC2EAF86530089C901 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
04C6EAAD2EAF86530089C901 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
04C6EAAE2EAF86530089C901 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
@@ -711,15 +684,9 @@
04E0383C2F1A7C30002CA5A0 /* KBCustomTabBar.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBCustomTabBar.h; sourceTree = "<group>"; };
04E0383D2F1A7C30002CA5A0 /* KBCustomTabBar.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBCustomTabBar.m; sourceTree = "<group>"; };
04E038D72F20BFFB002CA5A0 /* websocket-api.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = "websocket-api.md"; sourceTree = "<group>"; };
04E038D92F20C420002CA5A0 /* VoiceChatStreamingManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VoiceChatStreamingManager.h; sourceTree = "<group>"; };
04E038DA2F20C420002CA5A0 /* VoiceChatStreamingManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VoiceChatStreamingManager.m; sourceTree = "<group>"; };
04E038DB2F20C420002CA5A0 /* VoiceChatWebSocketClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VoiceChatWebSocketClient.h; sourceTree = "<group>"; };
04E038DC2F20C420002CA5A0 /* VoiceChatWebSocketClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VoiceChatWebSocketClient.m; sourceTree = "<group>"; };
04E038E22F20E500002CA5A0 /* deepgramAPI.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = deepgramAPI.md; sourceTree = "<group>"; };
04E038E42F20E877002CA5A0 /* DeepgramStreamingManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DeepgramStreamingManager.h; sourceTree = "<group>"; };
04E038E52F20E877002CA5A0 /* DeepgramStreamingManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DeepgramStreamingManager.m; sourceTree = "<group>"; };
04E038E62F20E877002CA5A0 /* DeepgramWebSocketClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DeepgramWebSocketClient.h; sourceTree = "<group>"; };
04E038E72F20E877002CA5A0 /* DeepgramWebSocketClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DeepgramWebSocketClient.m; sourceTree = "<group>"; };
04E038ED2F21F0EC002CA5A0 /* AiVM.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AiVM.h; sourceTree = "<group>"; };
04E038EE2F21F0EC002CA5A0 /* AiVM.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AiVM.m; sourceTree = "<group>"; };
04E039422F236E75002CA5A0 /* KBChatAssistantMessageCell.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBChatAssistantMessageCell.h; sourceTree = "<group>"; };
@@ -1180,42 +1147,20 @@
0460866F2F191A5100757C95 /* VM */ = {
isa = PBXGroup;
children = (
0460869D2F19239B00757C95 /* ASRStreamClient.h */,
0460869E2F19239B00757C95 /* ASRStreamClient.m */,
0460869F2F19239B00757C95 /* AudioCaptureManager.h */,
046086A02F19239B00757C95 /* AudioCaptureManager.m */,
046086A12F19239B00757C95 /* AudioSessionManager.h */,
046086A22F19239B00757C95 /* AudioSessionManager.m */,
046086A32F19239B00757C95 /* AudioStreamPlayer.h */,
046086A42F19239B00757C95 /* AudioStreamPlayer.m */,
046086A52F19239B00757C95 /* ConversationOrchestrator.h */,
046086A62F19239B00757C95 /* ConversationOrchestrator.m */,
046086A72F19239B00757C95 /* LLMStreamClient.h */,
046086A82F19239B00757C95 /* LLMStreamClient.m */,
046086A92F19239B00757C95 /* Segmenter.h */,
046086AA2F19239B00757C95 /* Segmenter.m */,
046086AB2F19239B00757C95 /* SubtitleSync.h */,
046086AC2F19239B00757C95 /* SubtitleSync.m */,
046086AD2F19239B00757C95 /* TTSPlaybackPipeline.h */,
046086AE2F19239B00757C95 /* TTSPlaybackPipeline.m */,
046086AF2F19239B00757C95 /* TTSServiceClient.h */,
046086B02F19239B00757C95 /* TTSServiceClient.m */,
04E038D92F20C420002CA5A0 /* VoiceChatStreamingManager.h */,
04E038DA2F20C420002CA5A0 /* VoiceChatStreamingManager.m */,
04E038DB2F20C420002CA5A0 /* VoiceChatWebSocketClient.h */,
04E038DC2F20C420002CA5A0 /* VoiceChatWebSocketClient.m */,
04E038E42F20E877002CA5A0 /* DeepgramStreamingManager.h */,
04E038E52F20E877002CA5A0 /* DeepgramStreamingManager.m */,
04BBF8FE2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.h */,
04BBF8FF2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m */,
04E0B1002F300001002CA5A0 /* KBVoiceToTextManager.h */,
04E0B1012F300001002CA5A0 /* KBVoiceToTextManager.m */,
04E0B2002F300002002CA5A0 /* KBVoiceRecordManager.h */,
04E0B2012F300002002CA5A0 /* KBVoiceRecordManager.m */,
04E038E62F20E877002CA5A0 /* DeepgramWebSocketClient.h */,
04E038E72F20E877002CA5A0 /* DeepgramWebSocketClient.m */,
04E038ED2F21F0EC002CA5A0 /* AiVM.h */,
04E038EE2F21F0EC002CA5A0 /* AiVM.m */,
048FFD342F29F88E005D62AE /* AIMessageVM.h */,
048FFD352F29F88E005D62AE /* AIMessageVM.m */,
);
path = VM;
sourceTree = "<group>";
@@ -2551,7 +2496,6 @@
0498BD712EE02A41006CC1D5 /* KBForgetPwdNewPwdVC.m in Sources */,
048908EF2EBF861800FABA60 /* KBSkinSectionTitleCell.m in Sources */,
0450AAE22EF03D5100B6AF06 /* KBPerson.swift in Sources */,
04E038E82F20E877002CA5A0 /* DeepgramWebSocketClient.m in Sources */,
04E038E92F20E877002CA5A0 /* DeepgramStreamingManager.m in Sources */,
04E0B1022F300001002CA5A0 /* KBVoiceToTextManager.m in Sources */,
04E0B2022F300002002CA5A0 /* KBVoiceRecordManager.m in Sources */,
@@ -2566,14 +2510,13 @@
04122F7E2EC5FC5500EF7AB3 /* KBJfPayCell.m in Sources */,
048FFD502F2B52E7005D62AE /* AIReportVC.m in Sources */,
049FB2402EC4B6EF00FAB05D /* KBULBridgeNotification.m in Sources */,
04BBF9002F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m in Sources */,
04FC95C92EB1E4C9007BD342 /* BaseNavigationController.m in Sources */,
048908DD2EBF67EB00FABA60 /* KBSearchResultVC.m in Sources */,
05A1B2D12F5B1A2B3C4D5E60 /* KBSearchVM.m in Sources */,
05A1B2D22F5B1A2B3C4D5E60 /* KBSearchThemeModel.m in Sources */,
047C65102EBCA8DD0035E841 /* HomeRankContentVC.m in Sources */,
047C655C2EBCD0F80035E841 /* UIView+KBShadow.m in Sources */,
04E038DD2F20C420002CA5A0 /* VoiceChatStreamingManager.m in Sources */,
04E038DE2F20C420002CA5A0 /* VoiceChatWebSocketClient.m in Sources */,
04F4C0B52F33053800E8F08C /* KBSvipBenefitCell.m in Sources */,
04F4C0B62F33053800E8F08C /* KBSvipSubscribeCell.m in Sources */,
049FB2262EC3136D00FAB05D /* KBPersonInfoItemCell.m in Sources */,
@@ -2584,7 +2527,6 @@
04FC95E52EB220B5007BD342 /* UIColor+Extension.m in Sources */,
048908E02EBF73DC00FABA60 /* MySkinVC.m in Sources */,
04F4C0AA2F32274000E8F08C /* KBPayMainVC.m in Sources */,
048FFD362F29F88E005D62AE /* AIMessageVM.m in Sources */,
048908F22EC047FD00FABA60 /* KBShopHeadView.m in Sources */,
0498BD742EE02E3D006CC1D5 /* KBRegistVerEmailVC.m in Sources */,
049FB2292EC31BB000FAB05D /* KBChangeNicknamePopView.m in Sources */,
@@ -2607,16 +2549,8 @@
048FFD112F27432D005D62AE /* KBPersonaPageModel.m in Sources */,
0498BD6B2EE025FC006CC1D5 /* KBForgetPwdVC.m in Sources */,
048FFD182F2763A5005D62AE /* KBVoiceInputBar.m in Sources */,
046086B12F19239B00757C95 /* SubtitleSync.m in Sources */,
046086B22F19239B00757C95 /* TTSServiceClient.m in Sources */,
046086B32F19239B00757C95 /* AudioSessionManager.m in Sources */,
046086B42F19239B00757C95 /* LLMStreamClient.m in Sources */,
046086B52F19239B00757C95 /* Segmenter.m in Sources */,
046086B62F19239B00757C95 /* TTSPlaybackPipeline.m in Sources */,
046086B72F19239B00757C95 /* ConversationOrchestrator.m in Sources */,
046086B82F19239B00757C95 /* ASRStreamClient.m in Sources */,
046086B92F19239B00757C95 /* AudioCaptureManager.m in Sources */,
046086BA2F19239B00757C95 /* AudioStreamPlayer.m in Sources */,
048908FE2EC0CC2400FABA60 /* UIScrollView+KBEmptyView.m in Sources */,
0498BD7E2EE04F9C006CC1D5 /* KBTag.m in Sources */,
04791F922ED48010004E8522 /* KBNoticeVC.m in Sources */,

View File

@@ -1,16 +0,0 @@
//
// AIMessageVM.h
// keyBoard
//
// Created by Mac on 2026/1/28.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface AIMessageVM : NSObject
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,12 +0,0 @@
//
// AIMessageVM.m
// keyBoard
//
// Created by Mac on 2026/1/28.
//
#import "AIMessageVM.h"
@implementation AIMessageVM
@end

View File

@@ -1,51 +0,0 @@
//
// ASRStreamClient.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// ASR 流式识别客户端代理
@protocol ASRStreamClientDelegate <NSObject>
@required
/// 收到实时识别结果(部分文本)
- (void)asrClientDidReceivePartialText:(NSString *)text;
/// 收到最终识别结果
- (void)asrClientDidReceiveFinalText:(NSString *)text;
/// 识别失败
- (void)asrClientDidFail:(NSError *)error;
@end
/// ASR 流式识别客户端
/// 使用 NSURLSessionWebSocketTask 实现流式语音识别
@interface ASRStreamClient : NSObject
@property(nonatomic, weak) id<ASRStreamClientDelegate> delegate;
/// ASR 服务器 WebSocket URL
@property(nonatomic, copy) NSString *serverURL;
/// 是否已连接
@property(nonatomic, assign, readonly, getter=isConnected) BOOL connected;
/// 开始新的识别会话
/// @param sessionId 会话 ID
- (void)startWithSessionId:(NSString *)sessionId;
/// 发送 PCM 音频帧20ms / 640 bytes
/// @param pcmFrame PCM 数据
- (void)sendAudioPCMFrame:(NSData *)pcmFrame;
/// 结束当前会话,请求最终结果
- (void)finalize;
/// 取消会话
- (void)cancel;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,271 +0,0 @@
//
// ASRStreamClient.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "ASRStreamClient.h"
#import "AudioCaptureManager.h"
@interface ASRStreamClient () <NSURLSessionWebSocketDelegate>
@property(nonatomic, strong) NSURLSession *urlSession;
@property(nonatomic, strong) NSURLSessionWebSocketTask *webSocketTask;
@property(nonatomic, copy) NSString *currentSessionId;
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL connected;
@end
@implementation ASRStreamClient
- (instancetype)init {
self = [super init];
if (self) {
_networkQueue = dispatch_queue_create("com.keyboard.aitalk.asr.network",
DISPATCH_QUEUE_SERIAL);
// TODO: ASR
_serverURL = @"wss://your-asr-server.com/ws/asr";
}
return self;
}
- (void)dealloc {
[self cancelInternal];
}
#pragma mark - Public Methods
- (void)startWithSessionId:(NSString *)sessionId {
dispatch_async(self.networkQueue, ^{
[self cancelInternal];
self.currentSessionId = sessionId;
// WebSocket
NSURL *url = [NSURL URLWithString:self.serverURL];
NSURLSessionConfiguration *config =
[NSURLSessionConfiguration defaultSessionConfiguration];
config.timeoutIntervalForRequest = 30;
config.timeoutIntervalForResource = 300;
self.urlSession = [NSURLSession sessionWithConfiguration:config
delegate:self
delegateQueue:nil];
self.webSocketTask = [self.urlSession webSocketTaskWithURL:url];
[self.webSocketTask resume];
// start
NSDictionary *startMessage = @{
@"type" : @"start",
@"sessionId" : sessionId,
@"format" : @"pcm_s16le",
@"sampleRate" : @(kAudioSampleRate),
@"channels" : @(kAudioChannels)
};
NSError *jsonError = nil;
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:startMessage
options:0
error:&jsonError];
if (jsonError) {
[self reportError:jsonError];
return;
}
NSString *jsonString = [[NSString alloc] initWithData:jsonData
encoding:NSUTF8StringEncoding];
NSURLSessionWebSocketMessage *message =
[[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
[self.webSocketTask
sendMessage:message
completionHandler:^(NSError *_Nullable error) {
if (error) {
[self reportError:error];
} else {
self.connected = YES;
[self receiveMessage];
NSLog(@"[ASRStreamClient] Started session: %@", sessionId);
}
}];
});
}
- (void)sendAudioPCMFrame:(NSData *)pcmFrame {
if (!self.connected || !self.webSocketTask) {
return;
}
dispatch_async(self.networkQueue, ^{
NSURLSessionWebSocketMessage *message =
[[NSURLSessionWebSocketMessage alloc] initWithData:pcmFrame];
[self.webSocketTask sendMessage:message
completionHandler:^(NSError *_Nullable error) {
if (error) {
NSLog(@"[ASRStreamClient] Failed to send audio frame: %@",
error.localizedDescription);
}
}];
});
}
- (void)finalize {
if (!self.connected || !self.webSocketTask) {
return;
}
dispatch_async(self.networkQueue, ^{
NSDictionary *finalizeMessage =
@{@"type" : @"finalize", @"sessionId" : self.currentSessionId ?: @""};
NSError *jsonError = nil;
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:finalizeMessage
options:0
error:&jsonError];
if (jsonError) {
[self reportError:jsonError];
return;
}
NSString *jsonString = [[NSString alloc] initWithData:jsonData
encoding:NSUTF8StringEncoding];
NSURLSessionWebSocketMessage *message =
[[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
[self.webSocketTask sendMessage:message
completionHandler:^(NSError *_Nullable error) {
if (error) {
[self reportError:error];
} else {
NSLog(@"[ASRStreamClient] Sent finalize for session: %@",
self.currentSessionId);
}
}];
});
}
- (void)cancel {
dispatch_async(self.networkQueue, ^{
[self cancelInternal];
});
}
#pragma mark - Private Methods
- (void)cancelInternal {
self.connected = NO;
if (self.webSocketTask) {
[self.webSocketTask cancel];
self.webSocketTask = nil;
}
if (self.urlSession) {
[self.urlSession invalidateAndCancel];
self.urlSession = nil;
}
self.currentSessionId = nil;
}
- (void)receiveMessage {
if (!self.webSocketTask) {
return;
}
__weak typeof(self) weakSelf = self;
[self.webSocketTask receiveMessageWithCompletionHandler:^(
NSURLSessionWebSocketMessage *_Nullable message,
NSError *_Nullable error) {
__strong typeof(weakSelf) strongSelf = weakSelf;
if (!strongSelf)
return;
if (error) {
//
if (error.code != 57 && error.code != NSURLErrorCancelled) {
[strongSelf reportError:error];
}
return;
}
if (message.type == NSURLSessionWebSocketMessageTypeString) {
[strongSelf handleTextMessage:message.string];
}
//
[strongSelf receiveMessage];
}];
}
- (void)handleTextMessage:(NSString *)text {
NSData *data = [text dataUsingEncoding:NSUTF8StringEncoding];
NSError *jsonError = nil;
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data
options:0
error:&jsonError];
if (jsonError) {
NSLog(@"[ASRStreamClient] Failed to parse message: %@", text);
return;
}
NSString *type = json[@"type"];
if ([type isEqualToString:@"partial"]) {
NSString *partialText = json[@"text"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate
respondsToSelector:@selector(asrClientDidReceivePartialText:)]) {
[self.delegate asrClientDidReceivePartialText:partialText];
}
});
} else if ([type isEqualToString:@"final"]) {
NSString *finalText = json[@"text"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate
respondsToSelector:@selector(asrClientDidReceiveFinalText:)]) {
[self.delegate asrClientDidReceiveFinalText:finalText];
}
});
//
[self cancelInternal];
} else if ([type isEqualToString:@"error"]) {
NSInteger code = [json[@"code"] integerValue];
NSString *message = json[@"message"] ?: @"Unknown error";
NSError *error =
[NSError errorWithDomain:@"ASRStreamClient"
code:code
userInfo:@{NSLocalizedDescriptionKey : message}];
[self reportError:error];
}
}
- (void)reportError:(NSError *)error {
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(asrClientDidFail:)]) {
[self.delegate asrClientDidFail:error];
}
});
}
#pragma mark - NSURLSessionWebSocketDelegate
- (void)URLSession:(NSURLSession *)session
webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
didOpenWithProtocol:(NSString *)protocol {
NSLog(@"[ASRStreamClient] WebSocket connected with protocol: %@", protocol);
}
- (void)URLSession:(NSURLSession *)session
webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
didCloseWithCode:(NSURLSessionWebSocketCloseCode)closeCode
reason:(NSData *)reason {
NSLog(@"[ASRStreamClient] WebSocket closed with code: %ld", (long)closeCode);
self.connected = NO;
}
@end

View File

@@ -1,63 +0,0 @@
//
// AudioStreamPlayer.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// 流式音频播放器代理
@protocol AudioStreamPlayerDelegate <NSObject>
@optional
/// 开始播放片段
- (void)audioStreamPlayerDidStartSegment:(NSString *)segmentId;
/// 播放时间更新
- (void)audioStreamPlayerDidUpdateTime:(NSTimeInterval)time
segmentId:(NSString *)segmentId;
/// 片段播放完成
- (void)audioStreamPlayerDidFinishSegment:(NSString *)segmentId;
@end
/// PCM 流式播放器
/// 使用 AVAudioEngine + AVAudioPlayerNode 实现低延迟播放
@interface AudioStreamPlayer : NSObject
@property(nonatomic, weak) id<AudioStreamPlayerDelegate> delegate;
/// 是否正在播放
@property(nonatomic, assign, readonly, getter=isPlaying) BOOL playing;
/// 启动播放器
/// @param error 错误信息
/// @return 是否启动成功
- (BOOL)start:(NSError **)error;
/// 停止播放器
- (void)stop;
/// 入队 PCM 数据块
/// @param pcmData PCM Int16 数据
/// @param sampleRate 采样率
/// @param channels 通道数
/// @param segmentId 片段 ID
- (void)enqueuePCMChunk:(NSData *)pcmData
sampleRate:(double)sampleRate
channels:(int)channels
segmentId:(NSString *)segmentId;
/// 获取片段的当前播放时间
/// @param segmentId 片段 ID
/// @return 当前时间(秒)
- (NSTimeInterval)playbackTimeForSegment:(NSString *)segmentId;
/// 获取片段的总时长
/// @param segmentId 片段 ID
/// @return 总时长(秒)
- (NSTimeInterval)durationForSegment:(NSString *)segmentId;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,246 +0,0 @@
//
// AudioStreamPlayer.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "AudioStreamPlayer.h"
#import <AVFoundation/AVFoundation.h>
@interface AudioStreamPlayer ()
@property(nonatomic, strong) AVAudioEngine *audioEngine;
@property(nonatomic, strong) AVAudioPlayerNode *playerNode;
@property(nonatomic, strong) AVAudioFormat *playbackFormat;
//
@property(nonatomic, copy) NSString *currentSegmentId;
@property(nonatomic, strong)
NSMutableDictionary<NSString *, NSNumber *> *segmentDurations;
@property(nonatomic, strong)
NSMutableDictionary<NSString *, NSNumber *> *segmentStartTimes;
@property(nonatomic, assign) NSUInteger scheduledSamples;
@property(nonatomic, assign) NSUInteger playedSamples;
//
@property(nonatomic, assign) BOOL playing;
@property(nonatomic, strong) dispatch_queue_t playerQueue;
@property(nonatomic, strong) NSTimer *progressTimer;
@end
@implementation AudioStreamPlayer

/// Sets up the engine, the player node and the fixed playback format.
- (instancetype)init {
  self = [super init];
  if (self) {
    _audioEngine = [[AVAudioEngine alloc] init];
    _playerNode = [[AVAudioPlayerNode alloc] init];
    _segmentDurations = [[NSMutableDictionary alloc] init];
    _segmentStartTimes = [[NSMutableDictionary alloc] init];
    _playerQueue = dispatch_queue_create("com.keyboard.aitalk.streamplayer",
                                         DISPATCH_QUEUE_SERIAL);
    // 16kHz, Mono, Float32
    _playbackFormat =
        [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                         sampleRate:16000
                                           channels:1
                                        interleaved:NO];
  }
  return self;
}

- (void)dealloc {
  // NOTE(review): -stop dispatches a block that captures self onto
  // playerQueue; doing that while self is deallocating is unsafe under
  // ARC — confirm teardown path.
  [self stop];
}

#pragma mark - Public Methods

/// Attaches/connects the player node and starts the audio engine.
/// @param error Out-parameter set when the engine fails to start.
/// @return YES when already playing or the engine started successfully.
- (BOOL)start:(NSError **)error {
  if (self.playing) {
    return YES;
  }
  // Wire the player node into the engine graph using the fixed format.
  [self.audioEngine attachNode:self.playerNode];
  [self.audioEngine connect:self.playerNode
                         to:self.audioEngine.mainMixerNode
                     format:self.playbackFormat];
  // Start the engine.
  NSError *startError = nil;
  [self.audioEngine prepare];
  if (![self.audioEngine startAndReturnError:&startError]) {
    if (error) {
      *error = startError;
    }
    NSLog(@"[AudioStreamPlayer] Failed to start engine: %@",
          startError.localizedDescription);
    return NO;
  }
  [self.playerNode play];
  self.playing = YES;
  // Drive time/finish callbacks from a UI-rate timer.
  [self startProgressTimer];
  NSLog(@"[AudioStreamPlayer] Started");
  return YES;
}

/// Stops playback asynchronously and clears all per-segment state.
- (void)stop {
  dispatch_async(self.playerQueue, ^{
    [self stopProgressTimer];
    [self.playerNode stop];
    [self.audioEngine stop];
    self.playing = NO;
    self.currentSegmentId = nil;
    self.scheduledSamples = 0;
    self.playedSamples = 0;
    [self.segmentDurations removeAllObjects];
    [self.segmentStartTimes removeAllObjects];
    NSLog(@"[AudioStreamPlayer] Stopped");
  });
}

/// Converts an Int16 PCM chunk to Float32 and schedules it on the player.
/// @param pcmData Interleaved Int16 PCM bytes.
/// @param sampleRate Sample rate of the chunk.
/// @param channels Channel count of the chunk.
/// @param segmentId Identifier of the TTS segment this chunk belongs to.
- (void)enqueuePCMChunk:(NSData *)pcmData
             sampleRate:(double)sampleRate
               channels:(int)channels
              segmentId:(NSString *)segmentId {
  if (!pcmData || pcmData.length == 0)
    return;
  dispatch_async(self.playerQueue, ^{
    // Detect the start of a new segment and reset its counters.
    BOOL isNewSegment = ![segmentId isEqualToString:self.currentSegmentId];
    if (isNewSegment) {
      self.currentSegmentId = segmentId;
      self.scheduledSamples = 0;
      self.segmentStartTimes[segmentId] = @(CACurrentMediaTime());
      dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate respondsToSelector:@selector
                           (audioStreamPlayerDidStartSegment:)]) {
          [self.delegate audioStreamPlayerDidStartSegment:segmentId];
        }
      });
    }
    // Int16 -> Float32
    NSUInteger sampleCount = pcmData.length / sizeof(int16_t);
    const int16_t *int16Samples = (const int16_t *)pcmData.bytes;
    // Build a buffer in the chunk's own format.
    // NOTE(review): this per-chunk format may differ from playbackFormat
    // (16 kHz mono) that the node was connected with — confirm inputs are
    // always 16 kHz mono, otherwise scheduling may fail or resample wrong.
    AVAudioFormat *format =
        [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                         sampleRate:sampleRate
                                           channels:channels
                                        interleaved:NO];
    AVAudioPCMBuffer *buffer = [[AVAudioPCMBuffer alloc]
        initWithPCMFormat:format
            frameCapacity:(AVAudioFrameCount)sampleCount];
    buffer.frameLength = (AVAudioFrameCount)sampleCount;
    float *floatChannel = buffer.floatChannelData[0];
    for (NSUInteger i = 0; i < sampleCount; i++) {
      floatChannel[i] = (float)int16Samples[i] / 32768.0f;
    }
    // Schedule the buffer; count played samples when it completes.
    __weak typeof(self) weakSelf = self;
    [self.playerNode scheduleBuffer:buffer
                  completionHandler:^{
                    __strong typeof(weakSelf) strongSelf = weakSelf;
                    if (!strongSelf)
                      return;
                    dispatch_async(strongSelf.playerQueue, ^{
                      strongSelf.playedSamples += sampleCount;
                    });
                  }];
    self.scheduledSamples += sampleCount;
    // Accumulate this segment's total duration.
    NSTimeInterval chunkDuration = (double)sampleCount / sampleRate;
    NSNumber *currentDuration = self.segmentDurations[segmentId];
    self.segmentDurations[segmentId] =
        @(currentDuration.doubleValue + chunkDuration);
  });
}

/// Current playback position of a segment, derived from played samples.
/// NOTE(review): playedSamples is written on playerQueue but read here
/// (typically from the main-thread timer) without synchronization — confirm.
- (NSTimeInterval)playbackTimeForSegment:(NSString *)segmentId {
  if (![segmentId isEqualToString:self.currentSegmentId]) {
    return 0;
  }
  // Samples played so far divided by the fixed output sample rate.
  return (double)self.playedSamples / self.playbackFormat.sampleRate;
}

/// Total accumulated duration of a segment (0 when unknown).
- (NSTimeInterval)durationForSegment:(NSString *)segmentId {
  NSNumber *duration = self.segmentDurations[segmentId];
  return duration ? duration.doubleValue : 0;
}

#pragma mark - Progress Timer

/// Starts a 30 fps progress timer on the main run loop.
/// NOTE(review): the target-based NSTimer retains self, so -dealloc cannot
/// run while the timer is scheduled — confirm it is always invalidated.
- (void)startProgressTimer {
  dispatch_async(dispatch_get_main_queue(), ^{
    self.progressTimer =
        [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0
                                         target:self
                                       selector:@selector(updateProgress)
                                       userInfo:nil
                                        repeats:YES];
  });
}

- (void)stopProgressTimer {
  dispatch_async(dispatch_get_main_queue(), ^{
    [self.progressTimer invalidate];
    self.progressTimer = nil;
  });
}

/// Timer tick: reports playback time and detects end of the current segment.
- (void)updateProgress {
  if (!self.playing || !self.currentSegmentId) {
    return;
  }
  NSTimeInterval currentTime =
      [self playbackTimeForSegment:self.currentSegmentId];
  NSString *segmentId = self.currentSegmentId;
  if ([self.delegate respondsToSelector:@selector
                     (audioStreamPlayerDidUpdateTime:segmentId:)]) {
    [self.delegate audioStreamPlayerDidUpdateTime:currentTime
                                        segmentId:segmentId];
  }
  // Treat "within 0.1 s of the accumulated duration" as finished.
  NSTimeInterval duration = [self durationForSegment:segmentId];
  if (duration > 0 && currentTime >= duration - 0.1) {
    // Hop through playerQueue so the finish notification orders after any
    // pending sample-count updates, then notify on the main queue.
    dispatch_async(self.playerQueue, ^{
      if ([self.delegate respondsToSelector:@selector
                         (audioStreamPlayerDidFinishSegment:)]) {
        dispatch_async(dispatch_get_main_queue(), ^{
          [self.delegate audioStreamPlayerDidFinishSegment:segmentId];
        });
      }
    });
  }
}

@end

View File

@@ -1,88 +0,0 @@
//
// ConversationOrchestrator.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Conversation state.
typedef NS_ENUM(NSInteger, ConversationState) {
  ConversationStateIdle = 0,    // Idle
  ConversationStateListening,   // Recording user speech
  ConversationStateRecognizing, // Recognizing (waiting for the ASR result)
  ConversationStateThinking,    // Thinking (waiting for the LLM reply)
  ConversationStateSpeaking     // Playing TTS audio
};
/// Conversation orchestrator.
/// Core state machine: wires all modules together and handles barge-in.
@interface ConversationOrchestrator : NSObject

/// Current state.
@property(nonatomic, assign, readonly) ConversationState state;

/// Current conversation ID.
@property(nonatomic, copy, readonly, nullable) NSString *conversationId;

#pragma mark - Callbacks

/// Called with the user's final recognized text.
@property(nonatomic, copy, nullable) void (^onUserFinalText)(NSString *text);

/// Called with the assistant text that should currently be visible
/// (typewriter effect).
@property(nonatomic, copy, nullable) void (^onAssistantVisibleText)
    (NSString *text);

/// Called with the assistant's complete reply text.
@property(nonatomic, copy, nullable) void (^onAssistantFullText)(NSString *text)
    ;

/// Called with streaming partial recognition text.
@property(nonatomic, copy, nullable) void (^onPartialText)(NSString *text);

/// Called with microphone volume updates (for the waveform UI).
@property(nonatomic, copy, nullable) void (^onVolumeUpdate)(float rms);

/// Called whenever the state changes.
@property(nonatomic, copy, nullable) void (^onStateChange)
    (ConversationState state);

/// Called when an error occurs.
@property(nonatomic, copy, nullable) void (^onError)(NSError *error);

/// Called when the assistant starts speaking.
@property(nonatomic, copy, nullable) void (^onSpeakingStart)(void);

/// Called when the assistant finishes speaking.
@property(nonatomic, copy, nullable) void (^onSpeakingEnd)(void);

#pragma mark - Configuration

/// ASR server URL.
@property(nonatomic, copy) NSString *asrServerURL;

/// LLM server URL.
@property(nonatomic, copy) NSString *llmServerURL;

/// TTS server URL.
@property(nonatomic, copy) NSString *ttsServerURL;

#pragma mark - User Actions

/// The user pressed the record button.
/// If playback is in progress it is interrupted (barge-in) automatically.
- (void)userDidPressRecord;

/// The user released the record button.
- (void)userDidReleaseRecord;

/// Stops everything (e.g. when leaving the page).
- (void)stop;

@end
NS_ASSUME_NONNULL_END

View File

@@ -1,532 +0,0 @@
//
// ConversationOrchestrator.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "ConversationOrchestrator.h"
#import "ASRStreamClient.h"
#import "AudioCaptureManager.h"
#import "AudioSessionManager.h"
#import "LLMStreamClient.h"
#import "Segmenter.h"
#import "SubtitleSync.h"
#import "TTSPlaybackPipeline.h"
#import "TTSServiceClient.h"
/// Class extension: module instances and mutable orchestration state.
@interface ConversationOrchestrator () <
    AudioSessionManagerDelegate, AudioCaptureManagerDelegate,
    ASRStreamClientDelegate, LLMStreamClientDelegate, TTSServiceClientDelegate,
    TTSPlaybackPipelineDelegate>
// Modules
@property(nonatomic, strong) AudioSessionManager *audioSession;
@property(nonatomic, strong) AudioCaptureManager *audioCapture;
@property(nonatomic, strong) ASRStreamClient *asrClient;
@property(nonatomic, strong) LLMStreamClient *llmClient;
@property(nonatomic, strong) Segmenter *segmenter;
@property(nonatomic, strong) TTSServiceClient *ttsClient;
@property(nonatomic, strong) TTSPlaybackPipeline *playbackPipeline;
@property(nonatomic, strong) SubtitleSync *subtitleSync;
// State
@property(nonatomic, assign) ConversationState state;
@property(nonatomic, copy) NSString *conversationId;
@property(nonatomic, copy) NSString *currentSessionId;
// Assistant-reply bookkeeping
@property(nonatomic, strong) NSMutableString *fullAssistantText;
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSString *> *segmentTextMap;
@property(nonatomic, assign) NSInteger segmentCounter;
// Serial queue all orchestration logic runs on.
@property(nonatomic, strong) dispatch_queue_t orchestratorQueue;
@end
@implementation ConversationOrchestrator

#pragma mark - Initialization

- (instancetype)init {
  self = [super init];
  if (self) {
    _orchestratorQueue = dispatch_queue_create(
        "com.keyboard.aitalk.orchestrator", DISPATCH_QUEUE_SERIAL);
    _state = ConversationStateIdle;
    _conversationId = [[NSUUID UUID] UUIDString];
    _fullAssistantText = [[NSMutableString alloc] init];
    _segmentTextMap = [[NSMutableDictionary alloc] init];
    _segmentCounter = 0;
    [self setupModules];
  }
  return self;
}

/// Creates and wires every module of the conversation pipeline.
- (void)setupModules {
  // Audio Session
  self.audioSession = [AudioSessionManager sharedManager];
  self.audioSession.delegate = self;
  // Audio Capture
  self.audioCapture = [[AudioCaptureManager alloc] init];
  self.audioCapture.delegate = self;
  // ASR Client
  self.asrClient = [[ASRStreamClient alloc] init];
  self.asrClient.delegate = self;
  // LLM Client
  self.llmClient = [[LLMStreamClient alloc] init];
  self.llmClient.delegate = self;
  // Segmenter
  self.segmenter = [[Segmenter alloc] init];
  // TTS Client
  self.ttsClient = [[TTSServiceClient alloc] init];
  self.ttsClient.delegate = self;
  // ElevenLabs voice configuration.
  self.ttsClient.voiceId = @"JBFqnCBsd6RMkjVDRZzb"; // George
  self.ttsClient.languageCode = @"zh"; // Chinese
  self.ttsClient.expectedPayloadType =
      TTSPayloadTypeURL; // audio is delivered as a URL
  // Playback Pipeline
  self.playbackPipeline = [[TTSPlaybackPipeline alloc] init];
  self.playbackPipeline.delegate = self;
  // Subtitle Sync
  self.subtitleSync = [[SubtitleSync alloc] init];
}

#pragma mark - Configuration Setters

// Each setter stores a copy and forwards the URL to the owning client.
- (void)setAsrServerURL:(NSString *)asrServerURL {
  _asrServerURL = [asrServerURL copy];
  self.asrClient.serverURL = asrServerURL;
}

- (void)setLlmServerURL:(NSString *)llmServerURL {
  _llmServerURL = [llmServerURL copy];
  self.llmClient.serverURL = llmServerURL;
}

- (void)setTtsServerURL:(NSString *)ttsServerURL {
  _ttsServerURL = [ttsServerURL copy];
  self.ttsClient.serverURL = ttsServerURL;
}

#pragma mark - User Actions

/// Record button pressed: barge in if the assistant is busy, then start
/// recording (requesting microphone permission on first use).
- (void)userDidPressRecord {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] userDidPressRecord, current state: %ld",
          (long)self.state);
    // Interrupt ongoing LLM/TTS work (barge-in).
    if (self.state == ConversationStateSpeaking ||
        self.state == ConversationStateThinking) {
      [self performBargein];
    }
    // Ask for microphone permission when it has not been granted yet.
    if (![self.audioSession hasMicrophonePermission]) {
      [self.audioSession requestMicrophonePermission:^(BOOL granted) {
        if (granted) {
          dispatch_async(self.orchestratorQueue, ^{
            [self startRecording];
          });
        }
      }];
      return;
    }
    [self startRecording];
  });
}

/// Record button released: stop capture and wait for the final ASR result.
- (void)userDidReleaseRecord {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] userDidReleaseRecord, current state: %ld",
          (long)self.state);
    if (self.state != ConversationStateListening) {
      return;
    }
    // Stop the microphone.
    [self.audioCapture stopCapture];
    // Ask ASR to produce the final result.
    [self.asrClient finalize];
    // Wait for recognition to complete.
    [self updateState:ConversationStateRecognizing];
  });
}

- (void)stop {
  dispatch_async(self.orchestratorQueue, ^{
    [self cancelAll];
    [self updateState:ConversationStateIdle];
  });
}

#pragma mark - Private: Recording

/// Configures/activates the audio session, then starts ASR and capture.
- (void)startRecording {
  // Configure the audio session for a conversation.
  NSError *error = nil;
  if (![self.audioSession configureForConversation:&error]) {
    [self reportError:error];
    return;
  }
  if (![self.audioSession activateSession:&error]) {
    [self reportError:error];
    return;
  }
  // Fresh session ID for this utterance.
  self.currentSessionId = [[NSUUID UUID] UUIDString];
  // Open the ASR stream.
  [self.asrClient startWithSessionId:self.currentSessionId];
  // Start capturing microphone audio.
  if (![self.audioCapture startCapture:&error]) {
    [self reportError:error];
    [self.asrClient cancel];
    return;
  }
  // Now listening.
  [self updateState:ConversationStateListening];
}

#pragma mark - Private: Barge-in ()

/// Cancels all in-flight work so a new user turn can start immediately.
- (void)performBargein {
  NSLog(@"[Orchestrator] Performing barge-in");
  // Cancel the network clients.
  [self.ttsClient cancel];
  [self.llmClient cancel];
  [self.asrClient cancel];
  // Stop audio output.
  [self.playbackPipeline stop];
  // Reset assistant-reply bookkeeping.
  [self.segmenter reset];
  [self.segmentTextMap removeAllObjects];
  [self.fullAssistantText setString:@""];
  self.segmentCounter = 0;
}

/// Stops every module and releases the audio session.
- (void)cancelAll {
  [self.audioCapture stopCapture];
  [self.asrClient cancel];
  [self.llmClient cancel];
  [self.ttsClient cancel];
  [self.playbackPipeline stop];
  [self.segmenter reset];
  [self.audioSession deactivateSession];
}

#pragma mark - Private: State Management

/// Transitions the state machine and fires callbacks on the main queue.
- (void)updateState:(ConversationState)newState {
  if (self.state == newState)
    return;
  ConversationState oldState = self.state;
  self.state = newState;
  NSLog(@"[Orchestrator] State: %ld -> %ld", (long)oldState, (long)newState);
  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onStateChange) {
      self.onStateChange(newState);
    }
    // Speaking edge transitions.
    if (newState == ConversationStateSpeaking &&
        oldState != ConversationStateSpeaking) {
      if (self.onSpeakingStart) {
        self.onSpeakingStart();
      }
    }
    if (oldState == ConversationStateSpeaking &&
        newState != ConversationStateSpeaking) {
      if (self.onSpeakingEnd) {
        self.onSpeakingEnd();
      }
    }
  });
}

/// Logs and forwards an error to the onError callback on the main queue.
- (void)reportError:(NSError *)error {
  NSLog(@"[Orchestrator] Error: %@", error.localizedDescription);
  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onError) {
      self.onError(error);
    }
  });
}

#pragma mark - AudioCaptureManagerDelegate

- (void)audioCaptureManagerDidOutputPCMFrame:(NSData *)pcmFrame {
  // Forward captured PCM straight to the ASR stream.
  [self.asrClient sendAudioPCMFrame:pcmFrame];
}

- (void)audioCaptureManagerDidUpdateRMS:(float)rms {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onVolumeUpdate) {
      self.onVolumeUpdate(rms);
    }
  });
}

#pragma mark - AudioSessionManagerDelegate

- (void)audioSessionManagerDidInterrupt:(KBAudioSessionInterruptionType)type {
  dispatch_async(self.orchestratorQueue, ^{
    if (type == KBAudioSessionInterruptionTypeBegan) {
      // A system interruption (phone call, Siri, …) aborts the whole turn.
      [self cancelAll];
      [self updateState:ConversationStateIdle];
    }
  });
}

- (void)audioSessionManagerMicrophonePermissionDenied {
  NSError *error =
      [NSError errorWithDomain:@"ConversationOrchestrator"
                          code:-1
                      userInfo:@{
                        NSLocalizedDescriptionKey : @"请在设置中开启麦克风权限"
                      }];
  [self reportError:error];
}

#pragma mark - ASRStreamClientDelegate

- (void)asrClientDidReceivePartialText:(NSString *)text {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onPartialText) {
      self.onPartialText(text);
    }
  });
}

/// Final ASR result: report it, then kick off the LLM and the playback
/// pipeline for the assistant's reply.
- (void)asrClientDidReceiveFinalText:(NSString *)text {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] ASR final text: %@", text);
    // Surface the user's final text.
    dispatch_async(dispatch_get_main_queue(), ^{
      if (self.onUserFinalText) {
        self.onUserFinalText(text);
      }
    });
    // Nothing recognized: back to idle.
    if (text.length == 0) {
      [self updateState:ConversationStateIdle];
      return;
    }
    // Wait for the LLM reply.
    [self updateState:ConversationStateThinking];
    // Reset assistant-reply bookkeeping.
    [self.fullAssistantText setString:@""];
    [self.segmentTextMap removeAllObjects];
    self.segmentCounter = 0;
    [self.segmenter reset];
    // Warm up the playback pipeline so audio starts as soon as the first
    // TTS segment arrives.
    NSError *error = nil;
    if (![self.playbackPipeline start:&error]) {
      NSLog(@"[Orchestrator] Failed to start playback pipeline: %@",
            error.localizedDescription);
    }
    // Send the user text to the LLM.
    [self.llmClient sendUserText:text conversationId:self.conversationId];
  });
}

- (void)asrClientDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
    [self updateState:ConversationStateIdle];
  });
}

#pragma mark - LLMStreamClientDelegate

/// Streams each token into the segmenter; completed fragments go to TTS.
- (void)llmClientDidReceiveToken:(NSString *)token {
  dispatch_async(self.orchestratorQueue, ^{
    // Accumulate the full reply.
    [self.fullAssistantText appendString:token];
    // Feed the segmenter.
    [self.segmenter appendToken:token];
    // Request TTS for every fragment that is now complete.
    NSArray<NSString *> *segments = [self.segmenter popReadySegments];
    for (NSString *segmentText in segments) {
      [self requestTTSForSegment:segmentText];
    }
  });
}

- (void)llmClientDidComplete {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] LLM complete");
    // Flush whatever is left in the segmenter.
    NSString *remaining = [self.segmenter flushRemainingSegment];
    if (remaining && remaining.length > 0) {
      [self requestTTSForSegment:remaining];
    }
    // Report the complete reply text.
    NSString *fullText = [self.fullAssistantText copy];
    dispatch_async(dispatch_get_main_queue(), ^{
      if (self.onAssistantFullText) {
        self.onAssistantFullText(fullText);
      }
    });
  });
}

- (void)llmClientDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
    [self updateState:ConversationStateIdle];
  });
}

#pragma mark - Private: TTS Request

/// Assigns the fragment a segment id, remembers its text (for subtitle
/// rendering) and requests synthesis.
- (void)requestTTSForSegment:(NSString *)segmentText {
  NSString *segmentId =
      [NSString stringWithFormat:@"seg_%ld", (long)self.segmentCounter++];
  // Remember the text so subtitles can be rendered during playback.
  self.segmentTextMap[segmentId] = segmentText;
  NSLog(@"[Orchestrator] Requesting TTS for segment %@: %@", segmentId,
        segmentText);
  // Fire the TTS request.
  [self.ttsClient requestTTSForText:segmentText segmentId:segmentId];
}

#pragma mark - TTSServiceClientDelegate

- (void)ttsClientDidReceiveURL:(NSURL *)url segmentId:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    [self.playbackPipeline enqueueURL:url segmentId:segmentId];
    // First audio arriving moves Thinking -> Speaking.
    if (self.state == ConversationStateThinking) {
      [self updateState:ConversationStateSpeaking];
    }
  });
}

- (void)ttsClientDidReceiveAudioChunk:(NSData *)chunk
                          payloadType:(TTSPayloadType)type
                            segmentId:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    [self.playbackPipeline enqueueChunk:chunk
                            payloadType:type
                              segmentId:segmentId];
    // First audio arriving moves Thinking -> Speaking.
    if (self.state == ConversationStateThinking) {
      [self updateState:ConversationStateSpeaking];
    }
  });
}

- (void)ttsClientDidFinishSegment:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    [self.playbackPipeline markSegmentComplete:segmentId];
  });
}

- (void)ttsClientDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
  });
}

#pragma mark - TTSPlaybackPipelineDelegate

- (void)pipelineDidStartSegment:(NSString *)segmentId
                       duration:(NSTimeInterval)duration {
  NSLog(@"[Orchestrator] Started playing segment: %@", segmentId);
}

/// Maps playback time to visible subtitle text (typewriter effect).
- (void)pipelineDidUpdatePlaybackTime:(NSTimeInterval)time
                            segmentId:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    // Look up the text of the segment being played.
    NSString *segmentText = self.segmentTextMap[segmentId];
    if (!segmentText)
      return;
    // Compute the visible prefix for the current position.
    NSTimeInterval duration =
        [self.playbackPipeline durationForSegment:segmentId];
    NSString *visibleText =
        [self.subtitleSync visibleTextForFullText:segmentText
                                      currentTime:time
                                         duration:duration];
    // TODO:
    // NOTE(review): only the current segment's text is surfaced; earlier
    // segments are not prepended — confirm this matches the intended UI.
    dispatch_async(dispatch_get_main_queue(), ^{
      if (self.onAssistantVisibleText) {
        self.onAssistantVisibleText(visibleText);
      }
    });
  });
}

- (void)pipelineDidFinishSegment:(NSString *)segmentId {
  NSLog(@"[Orchestrator] Finished playing segment: %@", segmentId);
}

- (void)pipelineDidFinishAllSegments {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] All segments finished");
    // The turn is over: go idle and release the audio session.
    [self updateState:ConversationStateIdle];
    [self.audioSession deactivateSession];
  });
}

- (void)pipelineDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
    [self updateState:ConversationStateIdle];
  });
}

@end

View File

@@ -1,48 +0,0 @@
//
// LLMStreamClient.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Delegate for the streaming LLM client.
@protocol LLMStreamClientDelegate <NSObject>
@required

/// Called for each newly received token.
- (void)llmClientDidReceiveToken:(NSString *)token;

/// Called when generation completes.
- (void)llmClientDidComplete;

/// Called when generation fails.
- (void)llmClientDidFail:(NSError *)error;

@end
/// Streaming LLM client.
/// Receives the token stream via SSE (Server-Sent Events) or WebSocket.
@interface LLMStreamClient : NSObject

@property(nonatomic, weak) id<LLMStreamClientDelegate> delegate;

/// LLM server URL.
@property(nonatomic, copy) NSString *serverURL;

/// API key, if required.
@property(nonatomic, copy, nullable) NSString *apiKey;

/// Whether a generation request is currently in flight.
@property(nonatomic, assign, readonly, getter=isGenerating) BOOL generating;

/// Sends the user's text and starts streaming the LLM reply.
/// @param text The user's input text.
/// @param conversationId Conversation ID.
- (void)sendUserText:(NSString *)text conversationId:(NSString *)conversationId;

/// Cancels the in-flight request.
- (void)cancel;

@end
NS_ASSUME_NONNULL_END

View File

@@ -1,244 +0,0 @@
//
// LLMStreamClient.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "LLMStreamClient.h"
/// Class extension: private networking state.
@interface LLMStreamClient () <NSURLSessionDataDelegate>
@property(nonatomic, strong) NSURLSession *urlSession;
@property(nonatomic, strong) NSURLSessionDataTask *dataTask;
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL generating;
// NOTE(review): appended from the NSURLSession delegate queue but cleared
// on networkQueue — confirm there is no data race.
@property(nonatomic, strong) NSMutableString *buffer; // pending SSE bytes
@end
@implementation LLMStreamClient

- (instancetype)init {
  self = [super init];
  if (self) {
    _networkQueue = dispatch_queue_create("com.keyboard.aitalk.llm.network",
                                          DISPATCH_QUEUE_SERIAL);
    _buffer = [[NSMutableString alloc] init];
    // TODO: replace the placeholder with the real LLM endpoint.
    _serverURL = @"https://your-llm-server.com/api/chat/stream";
  }
  return self;
}

- (void)dealloc {
  // NOTE(review): -cancel dispatches a block that captures self onto
  // networkQueue; doing that during dealloc is unsafe under ARC — confirm.
  [self cancel];
}

#pragma mark - Public Methods

/// Builds and starts a streaming POST request carrying the user's text.
/// @param text The user's input text (sent as "message").
/// @param conversationId Conversation ID included in the request body.
- (void)sendUserText:(NSString *)text
      conversationId:(NSString *)conversationId {
  dispatch_async(self.networkQueue, ^{
    [self cancelInternal];
    self.generating = YES;
    [self.buffer setString:@""];
    // Build the request.
    NSURL *url = [NSURL URLWithString:self.serverURL];
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
    request.HTTPMethod = @"POST";
    [request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
    [request setValue:@"text/event-stream" forHTTPHeaderField:@"Accept"];
    if (self.apiKey) {
      [request setValue:[NSString stringWithFormat:@"Bearer %@", self.apiKey]
          forHTTPHeaderField:@"Authorization"];
    }
    // Request body.
    NSDictionary *body = @{
      @"message" : text,
      @"conversationId" : conversationId,
      @"stream" : @YES
    };
    NSError *jsonError = nil;
    NSData *jsonData = [NSJSONSerialization dataWithJSONObject:body
                                                       options:0
                                                         error:&jsonError];
    if (jsonError) {
      [self reportError:jsonError];
      return;
    }
    request.HTTPBody = jsonData;
    // Fresh session per request; delegate callbacks arrive on a
    // session-owned serial queue (delegateQueue:nil).
    NSURLSessionConfiguration *config =
        [NSURLSessionConfiguration defaultSessionConfiguration];
    config.timeoutIntervalForRequest = 60;
    config.timeoutIntervalForResource = 300;
    self.urlSession = [NSURLSession sessionWithConfiguration:config
                                                    delegate:self
                                               delegateQueue:nil];
    self.dataTask = [self.urlSession dataTaskWithRequest:request];
    [self.dataTask resume];
    NSLog(@"[LLMStreamClient] Started request for conversation: %@",
          conversationId);
  });
}

- (void)cancel {
  dispatch_async(self.networkQueue, ^{
    [self cancelInternal];
  });
}

#pragma mark - Private Methods

/// Tears down the current task/session and clears the SSE buffer.
/// Runs on networkQueue.
- (void)cancelInternal {
  self.generating = NO;
  if (self.dataTask) {
    [self.dataTask cancel];
    self.dataTask = nil;
  }
  if (self.urlSession) {
    [self.urlSession invalidateAndCancel];
    self.urlSession = nil;
  }
  [self.buffer setString:@""];
}

/// Reports a failure to the delegate on the main queue.
- (void)reportError:(NSError *)error {
  self.generating = NO;
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector(llmClientDidFail:)]) {
      [self.delegate llmClientDidFail:error];
    }
  });
}

/// Reports completion to the delegate on the main queue.
- (void)reportComplete {
  self.generating = NO;
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector(llmClientDidComplete)]) {
      [self.delegate llmClientDidComplete];
    }
  });
}

/// Forwards one token to the delegate on the main queue.
- (void)reportToken:(NSString *)token {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(llmClientDidReceiveToken:)]) {
      [self.delegate llmClientDidReceiveToken:token];
    }
  });
}

#pragma mark - SSE Parsing

/// Appends raw bytes to the buffer and dispatches complete SSE events.
/// Called from the NSURLSession delegate queue.
- (void)parseSSEData:(NSData *)data {
  NSString *string = [[NSString alloc] initWithData:data
                                           encoding:NSUTF8StringEncoding];
  if (!string)
    return;
  [self.buffer appendString:string];
  // SSE events are separated by a blank line ("\n\n").
  NSArray *events = [self.buffer componentsSeparatedByString:@"\n\n"];
  // Keep the trailing (possibly incomplete) part in the buffer.
  if (events.count > 1) {
    [self.buffer setString:events.lastObject];
    for (NSUInteger i = 0; i < events.count - 1; i++) {
      [self handleSSEEvent:events[i]];
    }
  }
}

/// Parses one SSE event and extracts the token payload.
- (void)handleSSEEvent:(NSString *)event {
  if (event.length == 0)
    return;
  // An SSE event consists of lines; payload lines are shaped as:
  //   data: {...}
  NSArray *lines = [event componentsSeparatedByString:@"\n"];
  for (NSString *line in lines) {
    if ([line hasPrefix:@"data: "]) {
      NSString *dataString = [line substringFromIndex:6];
      // End-of-stream sentinel.
      if ([dataString isEqualToString:@"[DONE]"]) {
        [self reportComplete];
        return;
      }
      // JSON payload.
      NSData *jsonData = [dataString dataUsingEncoding:NSUTF8StringEncoding];
      NSError *jsonError = nil;
      NSDictionary *json = [NSJSONSerialization JSONObjectWithData:jsonData
                                                           options:0
                                                             error:&jsonError];
      if (jsonError) {
        NSLog(@"[LLMStreamClient] Failed to parse SSE data: %@", dataString);
        continue;
      }
      // Token extraction supports both a flat {"token": "..."} shape and
      // the OpenAI-style {"choices": [{"delta": {"content": "..."}}]}.
      NSString *token = json[@"token"];
      if (!token) {
        // OpenAI-compatible shape.
        NSArray *choices = json[@"choices"];
        if (choices.count > 0) {
          NSDictionary *delta = choices[0][@"delta"];
          token = delta[@"content"];
        }
      }
      if (token && token.length > 0) {
        [self reportToken:token];
      }
    }
  }
}

#pragma mark - NSURLSessionDataDelegate

- (void)URLSession:(NSURLSession *)session
          dataTask:(NSURLSessionDataTask *)dataTask
    didReceiveData:(NSData *)data {
  [self parseSSEData:data];
}

- (void)URLSession:(NSURLSession *)session
                    task:(NSURLSessionTask *)task
    didCompleteWithError:(NSError *)error {
  if (error) {
    if (error.code != NSURLErrorCancelled) {
      [self reportError:error];
    }
  } else {
    // Flush any trailing event that was not "\n\n"-terminated.
    if (self.buffer.length > 0) {
      [self handleSSEEvent:self.buffer];
      [self.buffer setString:@""];
    }
    [self reportComplete];
  }
}

@end

View File

@@ -1,37 +0,0 @@
//
// Segmenter.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Sentence segmenter.
/// Splits the LLM token stream into sentence fragments that can be sent to
/// TTS as soon as they are complete.
@interface Segmenter : NSObject

/// Forced-split threshold: when the buffered text grows past this many
/// characters without a sentence terminator, it is split anyway.
/// Default: 30.
@property(nonatomic, assign) NSUInteger maxCharacterThreshold;

/// Appends one LLM token to the internal buffer.
/// @param token Token emitted by the LLM stream.
- (void)appendToken:(NSString *)token;

/// Returns and removes the fragments that are ready for TTS.
/// @return Fragments that can be synthesized immediately (may be empty).
- (NSArray<NSString *> *)popReadySegments;

/// Returns whatever is left in the buffer (used for the final flush).
/// @return The remaining fragment, or nil when the buffer is empty after
///         trimming whitespace. (The implementation returns nil in that
///         case, so the return is annotated nullable rather than inheriting
///         NS_ASSUME_NONNULL's implicit nonnull.)
- (nullable NSString *)flushRemainingSegment;

/// Resets all internal state.
- (void)reset;

@end
NS_ASSUME_NONNULL_END

View File

@@ -1,148 +0,0 @@
//
// Segmenter.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "Segmenter.h"
/// Class extension: internal buffering state.
@interface Segmenter ()
// Text accumulated since the last split.
@property(nonatomic, strong) NSMutableString *buffer;
// Fragments ready to be popped for TTS.
@property(nonatomic, strong) NSMutableArray<NSString *> *readySegments;
@end
@implementation Segmenter

- (instancetype)init {
  self = [super init];
  if (self) {
    _buffer = [[NSMutableString alloc] init];
    _readySegments = [[NSMutableArray alloc] init];
    _maxCharacterThreshold = 30;
  }
  return self;
}

#pragma mark - Public Methods

/// Appends a token and immediately re-evaluates split points.
- (void)appendToken:(NSString *)token {
  if (!token || token.length == 0) {
    return;
  }
  [self.buffer appendString:token];
  // Re-scan the buffer for complete fragments.
  [self checkAndSplit];
}

/// Returns the pending fragments and clears the ready list.
- (NSArray<NSString *> *)popReadySegments {
  NSArray *segments = [self.readySegments copy];
  [self.readySegments removeAllObjects];
  return segments;
}

/// Returns the trimmed remainder of the buffer, or nil when empty.
- (NSString *)flushRemainingSegment {
  NSString *remaining = [self.buffer copy];
  [self.buffer setString:@""];
  remaining = [remaining
      stringByTrimmingCharactersInSet:[NSCharacterSet
                                          whitespaceAndNewlineCharacterSet]];
  return remaining.length > 0 ? remaining : nil;
}

- (void)reset {
  [self.buffer setString:@""];
  [self.readySegments removeAllObjects];
}

#pragma mark - Private Methods

/// Scans the buffer and moves complete fragments into readySegments.
/// Splits at sentence terminators; when none appear but the buffer exceeds
/// maxCharacterThreshold, a forced split is made at the best soft break.
- (void)checkAndSplit {
  // Sentence terminators that always end a fragment.
  NSCharacterSet *sentenceEnders =
      [NSCharacterSet characterSetWithCharactersInString:@"。!?\n"];
  while (YES) {
    NSString *currentBuffer = self.buffer;
    NSRange range = [currentBuffer rangeOfCharacterFromSet:sentenceEnders];
    if (range.location != NSNotFound) {
      // Cut right after the terminator.
      NSUInteger endIndex = range.location + 1;
      NSString *segment = [currentBuffer substringToIndex:endIndex];
      segment = [segment stringByTrimmingCharactersInSet:
                             [NSCharacterSet whitespaceAndNewlineCharacterSet]];
      if (segment.length > 0) {
        [self.readySegments addObject:segment];
      }
      // Remove the consumed prefix.
      [self.buffer deleteCharactersInRange:NSMakeRange(0, endIndex)];
    } else if (self.maxCharacterThreshold > 0 &&
               currentBuffer.length >= self.maxCharacterThreshold) {
      // Fix: guard against maxCharacterThreshold == 0, which would make this
      // branch always taken and delete zero characters — an infinite loop.
      // No terminator yet but the buffer is long enough to force a split:
      // prefer a soft break (comma/space); otherwise cut at the threshold.
      NSRange breakRange = [self findBestBreakPoint:currentBuffer];
      if (breakRange.location != NSNotFound) {
        NSString *segment =
            [currentBuffer substringToIndex:breakRange.location + 1];
        segment =
            [segment stringByTrimmingCharactersInSet:
                         [NSCharacterSet whitespaceAndNewlineCharacterSet]];
        if (segment.length > 0) {
          [self.readySegments addObject:segment];
        }
        [self.buffer
            deleteCharactersInRange:NSMakeRange(0, breakRange.location + 1)];
      } else {
        // No soft break found: hard-cut at the threshold.
        NSString *segment =
            [currentBuffer substringToIndex:self.maxCharacterThreshold];
        segment =
            [segment stringByTrimmingCharactersInSet:
                         [NSCharacterSet whitespaceAndNewlineCharacterSet]];
        if (segment.length > 0) {
          [self.readySegments addObject:segment];
        }
        [self.buffer
            deleteCharactersInRange:NSMakeRange(0, self.maxCharacterThreshold)];
      }
    } else {
      // Nothing more to split.
      break;
    }
  }
}

/// Finds the right-most soft break (comma, pause mark, space) in the back
/// half of the text.
/// @return The break character's range, or {NSNotFound, 0} when none exists.
- (NSRange)findBestBreakPoint:(NSString *)text {
  NSCharacterSet *breakChars =
      [NSCharacterSet characterSetWithCharactersInString:@",、;;: "];
  // Fix: use signed arithmetic on both sides of the comparison. The original
  // compared NSInteger i against an NSUInteger expression, promoting i to
  // unsigned — at i == -1 that wraps to a huge value and keeps the loop
  // running into -characterAtIndex: out-of-bounds territory.
  NSInteger minIndex = (NSInteger)(self.maxCharacterThreshold / 2);
  for (NSInteger i = (NSInteger)text.length - 1; i >= minIndex; i--) {
    unichar c = [text characterAtIndex:i];
    if ([breakChars characterIsMember:c]) {
      return NSMakeRange(i, 1);
    }
  }
  return NSMakeRange(NSNotFound, 0);
}

@end

View File

@@ -1,36 +0,0 @@
//
// SubtitleSync.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Subtitle synchronizer.
/// Maps playback progress to visible text for a typewriter effect.
@interface SubtitleSync : NSObject

/// Returns the text that should currently be visible.
/// @param fullText The complete text.
/// @param currentTime Current playback time in seconds.
/// @param duration Total duration in seconds.
/// @return The visible prefix of the text (typewriter effect).
- (NSString *)visibleTextForFullText:(NSString *)fullText
                         currentTime:(NSTimeInterval)currentTime
                            duration:(NSTimeInterval)duration;

/// Returns the number of characters that should be visible.
/// @param fullText The complete text.
/// @param currentTime Current playback time in seconds.
/// @param duration Total duration in seconds.
/// @return The number of visible characters.
- (NSUInteger)visibleCountForFullText:(NSString *)fullText
                          currentTime:(NSTimeInterval)currentTime
                             duration:(NSTimeInterval)duration;

@end
NS_ASSUME_NONNULL_END

View File

@@ -1,66 +0,0 @@
//
// SubtitleSync.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "SubtitleSync.h"
@implementation SubtitleSync

/// Returns the prefix of `fullText` that should currently be visible,
/// producing a typewriter effect as playback advances.
- (NSString *)visibleTextForFullText:(NSString *)fullText
                         currentTime:(NSTimeInterval)currentTime
                            duration:(NSTimeInterval)duration {
  if (fullText.length == 0) {
    return @"";
  }
  NSUInteger shownCharacters = [self visibleCountForFullText:fullText
                                                 currentTime:currentTime
                                                    duration:duration];
  return (shownCharacters >= fullText.length)
             ? fullText
             : [fullText substringToIndex:shownCharacters];
}

/// Maps playback progress to a character count.
/// Runs slightly ahead of the audio (5% lead) so the text never lags the
/// voice; the count is clamped to the text length.
- (NSUInteger)visibleCountForFullText:(NSString *)fullText
                          currentTime:(NSTimeInterval)currentTime
                             duration:(NSTimeInterval)duration {
  NSUInteger totalCharacters = fullText.length;
  if (totalCharacters == 0) {
    return 0;
  }
  // Unknown duration: show everything rather than hiding text forever.
  if (duration <= 0 || currentTime >= duration) {
    return totalCharacters;
  }
  if (currentTime <= 0) {
    return 0;
  }
  // Linear progress with a small lead factor, capped at 100%.
  double adjustedProgress = MIN((currentTime / duration) * 1.05, 1.0);
  NSUInteger shown = (NSUInteger)round(totalCharacters * adjustedProgress);
  return MIN(shown, totalCharacters);
}

@end

View File

@@ -1,79 +0,0 @@
//
// TTSPlaybackPipeline.h
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "TTSServiceClient.h"
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Delegate for the playback pipeline.
@protocol TTSPlaybackPipelineDelegate <NSObject>
@optional

/// A segment started playing.
- (void)pipelineDidStartSegment:(NSString *)segmentId
                       duration:(NSTimeInterval)duration;

/// Playback time advanced.
- (void)pipelineDidUpdatePlaybackTime:(NSTimeInterval)time
                            segmentId:(NSString *)segmentId;

/// A segment finished playing.
- (void)pipelineDidFinishSegment:(NSString *)segmentId;

/// All queued segments finished playing.
- (void)pipelineDidFinishAllSegments;

/// Playback failed.
- (void)pipelineDidFail:(NSError *)error;

@end
/// TTS playback pipeline.
/// Routes each payload to the matching player based on its payloadType.
@interface TTSPlaybackPipeline : NSObject

@property(nonatomic, weak) id<TTSPlaybackPipelineDelegate> delegate;

/// Whether the pipeline is currently playing.
@property(nonatomic, assign, readonly, getter=isPlaying) BOOL playing;

/// ID of the segment currently playing.
@property(nonatomic, copy, readonly, nullable) NSString *currentSegmentId;

/// Starts the pipeline.
/// @param error Error output.
/// @return YES on success.
- (BOOL)start:(NSError **)error;

/// Stops the pipeline immediately (used for barge-in).
- (void)stop;

/// Enqueues an audio URL for playback.
/// @param url Audio URL.
/// @param segmentId Segment ID.
- (void)enqueueURL:(NSURL *)url segmentId:(NSString *)segmentId;

/// Enqueues a chunk of audio data.
/// @param chunk Audio data.
/// @param type Payload type.
/// @param segmentId Segment ID.
- (void)enqueueChunk:(NSData *)chunk
         payloadType:(TTSPayloadType)type
           segmentId:(NSString *)segmentId;

/// Marks a segment's data as complete (streaming mode).
/// @param segmentId Segment ID.
- (void)markSegmentComplete:(NSString *)segmentId;

/// Returns a segment's current playback time.
/// @param segmentId Segment ID.
/// @return Current time in seconds, or 0 when not playing.
- (NSTimeInterval)currentTimeForSegment:(NSString *)segmentId;

/// Returns a segment's total duration.
/// @param segmentId Segment ID.
/// @return Duration in seconds.
- (NSTimeInterval)durationForSegment:(NSString *)segmentId;

@end
NS_ASSUME_NONNULL_END

View File

@@ -1,343 +0,0 @@
//
// TTSPlaybackPipeline.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "TTSPlaybackPipeline.h"
#import "AudioStreamPlayer.h"
#import <AVFoundation/AVFoundation.h>
@interface TTSPlaybackPipeline () <AudioStreamPlayerDelegate>
// URL mode: plays complete audio files referenced by URL.
@property(nonatomic, strong) AVPlayer *urlPlayer;
// Streaming modes: consumes raw PCM chunks.
@property(nonatomic, strong) AudioStreamPlayer *streamPlayer;
// FIFO of pending URL segments; entries are @{@"type", @"url", @"segmentId"}.
@property(nonatomic, strong) NSMutableArray<NSDictionary *> *segmentQueue;
// Known segment durations in seconds, keyed by segment id.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSNumber *> *segmentDurations;
// Readwrite redeclarations of the public readonly state.
@property(nonatomic, assign) BOOL playing;
@property(nonatomic, copy) NSString *currentSegmentId;
// Token from -addPeriodicTimeObserverForInterval:queue:usingBlock:.
@property(nonatomic, strong) id playerTimeObserver;
// Serial queue that owns segmentQueue and playback state.
@property(nonatomic, strong) dispatch_queue_t playbackQueue;
@end

@implementation TTSPlaybackPipeline

- (instancetype)init {
  self = [super init];
  if (self) {
    _segmentQueue = [[NSMutableArray alloc] init];
    _segmentDurations = [[NSMutableDictionary alloc] init];
    _playbackQueue = dispatch_queue_create("com.keyboard.aitalk.playback",
                                           DISPATCH_QUEUE_SERIAL);
  }
  return self;
}

- (void)dealloc {
  // Fix: previously called -stop, which dispatch_asyncs a block capturing
  // self. Capturing self from dealloc resurrects a deallocating object
  // (undefined behavior under ARC). Tear down inline instead; nothing can
  // still be queued on playbackQueue here, because a queued block would
  // itself be retaining self.
  if (_playerTimeObserver) {
    [_urlPlayer removeTimeObserver:_playerTimeObserver];
    _playerTimeObserver = nil;
  }
  [_urlPlayer pause];
  [_streamPlayer stop];
}

#pragma mark - Public Methods

/// Lazily creates the stream player and starts it.
- (BOOL)start:(NSError **)error {
  if (!self.streamPlayer) {
    self.streamPlayer = [[AudioStreamPlayer alloc] init];
    self.streamPlayer.delegate = self;
  }
  return [self.streamPlayer start:error];
}

/// Immediate stop (interrupt): both players, the queue and all cached state.
- (void)stop {
  dispatch_async(self.playbackQueue, ^{
    // URL player: pause and drop the periodic time observer.
    if (self.urlPlayer) {
      [self.urlPlayer pause];
      if (self.playerTimeObserver) {
        [self.urlPlayer removeTimeObserver:self.playerTimeObserver];
        self.playerTimeObserver = nil;
      }
      self.urlPlayer = nil;
    }
    // Stream player.
    [self.streamPlayer stop];
    // Reset queued segments and playback state.
    [self.segmentQueue removeAllObjects];
    [self.segmentDurations removeAllObjects];
    self.playing = NO;
    self.currentSegmentId = nil;
  });
}

- (void)enqueueURL:(NSURL *)url segmentId:(NSString *)segmentId {
  if (!url || !segmentId)
    return;
  dispatch_async(self.playbackQueue, ^{
    NSDictionary *segment = @{
      @"type" : @(TTSPayloadTypeURL),
      @"url" : url,
      @"segmentId" : segmentId
    };
    [self.segmentQueue addObject:segment];
    // Start playback right away when idle.
    if (!self.playing) {
      [self playNextSegment];
    }
  });
}

- (void)enqueueChunk:(NSData *)chunk
         payloadType:(TTSPayloadType)type
           segmentId:(NSString *)segmentId {
  if (!chunk || !segmentId)
    return;
  dispatch_async(self.playbackQueue, ^{
    switch (type) {
    case TTSPayloadTypePCMChunk:
      // Hand raw PCM straight to the stream player.
      // NOTE(review): 16 kHz mono is assumed here — confirm it matches the
      // TTS backend's output format.
      [self.streamPlayer enqueuePCMChunk:chunk
                              sampleRate:16000
                                channels:1
                               segmentId:segmentId];
      if (!self.playing) {
        self.playing = YES;
        self.currentSegmentId = segmentId;
        dispatch_async(dispatch_get_main_queue(), ^{
          if ([self.delegate respondsToSelector:@selector
                             (pipelineDidStartSegment:duration:)]) {
            // Duration is unknown for streamed audio; report 0.
            [self.delegate pipelineDidStartSegment:segmentId duration:0];
          }
        });
      }
      break;
    case TTSPayloadTypeAACChunk:
      // TODO: decode AAC -> PCM -> streamPlayer
      NSLog(@"[TTSPlaybackPipeline] AAC chunk decoding not implemented yet");
      break;
    case TTSPayloadTypeOpusChunk:
      // TODO: decode Opus -> PCM -> streamPlayer
      NSLog(@"[TTSPlaybackPipeline] Opus chunk decoding not implemented yet");
      break;
    default:
      break;
    }
  });
}

- (void)markSegmentComplete:(NSString *)segmentId {
  // Intentionally empty: the stream player drains whatever has been
  // enqueued; no explicit end-of-segment signal is needed here.
}

- (NSTimeInterval)currentTimeForSegment:(NSString *)segmentId {
  if (![segmentId isEqualToString:self.currentSegmentId]) {
    return 0;
  }
  if (self.urlPlayer) {
    return CMTimeGetSeconds(self.urlPlayer.currentTime);
  }
  return [self.streamPlayer playbackTimeForSegment:segmentId];
}

- (NSTimeInterval)durationForSegment:(NSString *)segmentId {
  NSNumber *cachedDuration = self.segmentDurations[segmentId];
  if (cachedDuration) {
    return cachedDuration.doubleValue;
  }
  if (self.urlPlayer && [segmentId isEqualToString:self.currentSegmentId]) {
    // Fix: use CMTIME_IS_NUMERIC instead of CMTIME_IS_VALID — an item whose
    // duration is not yet loaded reports kCMTimeIndefinite, which is
    // "valid" but converts to NaN. Also renamed the local to stop shadowing
    // the NSNumber above.
    CMTime itemDuration = self.urlPlayer.currentItem.duration;
    if (CMTIME_IS_NUMERIC(itemDuration)) {
      return CMTimeGetSeconds(itemDuration);
    }
  }
  return [self.streamPlayer durationForSegment:segmentId];
}

#pragma mark - Private Methods

/// Pops and plays the next queued URL segment; fires
/// pipelineDidFinishAllSegments when the queue is drained.
/// Must be called on playbackQueue.
- (void)playNextSegment {
  if (self.segmentQueue.count == 0) {
    self.playing = NO;
    self.currentSegmentId = nil;
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate
              respondsToSelector:@selector(pipelineDidFinishAllSegments)]) {
        [self.delegate pipelineDidFinishAllSegments];
      }
    });
    return;
  }
  NSDictionary *segment = self.segmentQueue.firstObject;
  [self.segmentQueue removeObjectAtIndex:0];
  TTSPayloadType type = [segment[@"type"] integerValue];
  NSString *segmentId = segment[@"segmentId"];
  self.playing = YES;
  self.currentSegmentId = segmentId;
  if (type == TTSPayloadTypeURL) {
    NSURL *url = segment[@"url"];
    [self playURL:url segmentId:segmentId];
  }
}

/// Plays one URL segment with AVPlayer: installs the end-of-item observer,
/// a 30 Hz periodic time observer, then loads the asset duration before
/// starting playback.
- (void)playURL:(NSURL *)url segmentId:(NSString *)segmentId {
  AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url];
  if (!self.urlPlayer) {
    self.urlPlayer = [AVPlayer playerWithPlayerItem:item];
  } else {
    [self.urlPlayer replaceCurrentItemWithPlayerItem:item];
  }
  // Observe end-of-item (removed again in -playerItemDidFinish:).
  [[NSNotificationCenter defaultCenter]
      addObserver:self
         selector:@selector(playerItemDidFinish:)
             name:AVPlayerItemDidPlayToEndTimeNotification
           object:item];
  // 30 Hz progress callbacks on the main queue.
  __weak typeof(self) weakSelf = self;
  self.playerTimeObserver = [self.urlPlayer
      addPeriodicTimeObserverForInterval:CMTimeMake(1, 30)
                                   queue:dispatch_get_main_queue()
                              usingBlock:^(CMTime time) {
                                __strong typeof(weakSelf) strongSelf =
                                    weakSelf;
                                if (!strongSelf)
                                  return;
                                NSTimeInterval currentTime =
                                    CMTimeGetSeconds(time);
                                if ([strongSelf.delegate
                                        respondsToSelector:
                                            @selector
                                            (pipelineDidUpdatePlaybackTime:
                                                                 segmentId:)]) {
                                  [strongSelf.delegate
                                      pipelineDidUpdatePlaybackTime:currentTime
                                                          segmentId:segmentId];
                                }
                              }];
  // Load the duration asynchronously, then start playback.
  [item.asset
      loadValuesAsynchronouslyForKeys:@[ @"duration" ]
                    completionHandler:^{
                      dispatch_async(dispatch_get_main_queue(), ^{
                        NSTimeInterval duration =
                            CMTimeGetSeconds(item.duration);
                        if (!isnan(duration)) {
                          self.segmentDurations[segmentId] = @(duration);
                        }
                        if ([self.delegate
                                respondsToSelector:
                                    @selector(pipelineDidStartSegment:
                                                            duration:)]) {
                          [self.delegate pipelineDidStartSegment:segmentId
                                                        duration:duration];
                        }
                        [self.urlPlayer play];
                      });
                    }];
}

/// End-of-item for URL mode: unhook observers, notify, advance the queue.
- (void)playerItemDidFinish:(NSNotification *)notification {
  [[NSNotificationCenter defaultCenter]
      removeObserver:self
                name:AVPlayerItemDidPlayToEndTimeNotification
              object:notification.object];
  if (self.playerTimeObserver) {
    [self.urlPlayer removeTimeObserver:self.playerTimeObserver];
    self.playerTimeObserver = nil;
  }
  NSString *finishedSegmentId = self.currentSegmentId;
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(pipelineDidFinishSegment:)]) {
      [self.delegate pipelineDidFinishSegment:finishedSegmentId];
    }
  });
  dispatch_async(self.playbackQueue, ^{
    [self playNextSegment];
  });
}

#pragma mark - AudioStreamPlayerDelegate

// Stream-player callbacks are forwarded 1:1 to the pipeline delegate on the
// main queue.

- (void)audioStreamPlayerDidStartSegment:(NSString *)segmentId {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(pipelineDidStartSegment:duration:)]) {
      [self.delegate pipelineDidStartSegment:segmentId duration:0];
    }
  });
}

- (void)audioStreamPlayerDidUpdateTime:(NSTimeInterval)time
                             segmentId:(NSString *)segmentId {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (pipelineDidUpdatePlaybackTime:segmentId:)]) {
      [self.delegate pipelineDidUpdatePlaybackTime:time segmentId:segmentId];
    }
  });
}

- (void)audioStreamPlayerDidFinishSegment:(NSString *)segmentId {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(pipelineDidFinishSegment:)]) {
      [self.delegate pipelineDidFinishSegment:segmentId];
    }
  });
  dispatch_async(self.playbackQueue, ^{
    // Streaming segments bypass segmentQueue, so only clear state / notify
    // completion when no URL segments are pending either.
    if (self.segmentQueue.count == 0) {
      self.playing = NO;
      self.currentSegmentId = nil;
      dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate
                respondsToSelector:@selector(pipelineDidFinishAllSegments)]) {
          [self.delegate pipelineDidFinishAllSegments];
        }
      });
    }
  });
}
@end

View File

@@ -1,66 +0,0 @@
//
//  TTSServiceClient.h
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Payload types a TTS backend can return.
typedef NS_ENUM(NSInteger, TTSPayloadType) {
  TTSPayloadTypeURL = 0,  // Mode A: returns an m4a/MP3 URL
  TTSPayloadTypePCMChunk, // Mode D: returns raw PCM chunks
  TTSPayloadTypeAACChunk, // Mode B: returns AAC chunks
  TTSPayloadTypeOpusChunk // Mode C: returns Opus chunks
};
/// Delegate for TTS client results. Callbacks arrive on the main queue.
@protocol TTSServiceClientDelegate <NSObject>
@optional
/// Received an audio URL (mode A).
- (void)ttsClientDidReceiveURL:(NSURL *)url segmentId:(NSString *)segmentId;
/// Received one audio data chunk (modes B/C/D).
- (void)ttsClientDidReceiveAudioChunk:(NSData *)chunk
                          payloadType:(TTSPayloadType)type
                            segmentId:(NSString *)segmentId;
/// The segment finished (all data delivered).
- (void)ttsClientDidFinishSegment:(NSString *)segmentId;
/// A request failed.
- (void)ttsClientDidFail:(NSError *)error;
@end
/// TTS service client.
/// Unified network layer supporting multiple TTS response shapes.
@interface TTSServiceClient : NSObject
@property(nonatomic, weak) id<TTSServiceClientDelegate> delegate;
/// TTS server base URL.
@property(nonatomic, copy) NSString *serverURL;
/// Voice id (ElevenLabs voice ID).
@property(nonatomic, copy) NSString *voiceId;
/// Language code (e.g. "zh", "en").
@property(nonatomic, copy) NSString *languageCode;
/// Payload type to expect (decided by server configuration).
@property(nonatomic, assign) TTSPayloadType expectedPayloadType;
/// Whether a request is in flight.
@property(nonatomic, assign, readonly, getter=isRequesting) BOOL requesting;
/// Requests synthesis of one text segment.
/// @param text Text to synthesize
/// @param segmentId Segment identifier used for routing and ordering
- (void)requestTTSForText:(NSString *)text segmentId:(NSString *)segmentId;
/// Cancels all in-flight requests.
- (void)cancel;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,302 +0,0 @@
//
// TTSServiceClient.m
// keyBoard
//
// Created by Mac on 2026/1/15.
//
#import "TTSServiceClient.h"
@interface TTSServiceClient () <NSURLSessionDataDelegate,
                                NSURLSessionWebSocketDelegate>
// Shared session for both HTTP (mode A) and WebSocket (modes B/C/D).
// NOTE(review): NSURLSession retains its delegate (self) until the session
// is invalidated, so this object stays alive for the session's lifetime —
// consider an explicit invalidation API.
@property(nonatomic, strong) NSURLSession *urlSession;
// In-flight tasks keyed by segment id.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSURLSessionTask *> *activeTasks;
// Serial queue that owns activeTasks and the requesting flag.
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL requesting;
@end

@implementation TTSServiceClient

- (instancetype)init {
  self = [super init];
  if (self) {
    _networkQueue = dispatch_queue_create("com.keyboard.aitalk.tts.network",
                                          DISPATCH_QUEUE_SERIAL);
    _activeTasks = [[NSMutableDictionary alloc] init];
    _expectedPayloadType = TTSPayloadTypeURL; // default to URL mode
    // TODO: point at the real TTS backend.
    _serverURL = @"https://your-tts-server.com/api/tts";
    [self setupSession];
  }
  return self;
}

/// Creates the shared NSURLSession with request/resource timeouts.
- (void)setupSession {
  NSURLSessionConfiguration *config =
      [NSURLSessionConfiguration defaultSessionConfiguration];
  config.timeoutIntervalForRequest = 30;
  config.timeoutIntervalForResource = 120;
  self.urlSession = [NSURLSession sessionWithConfiguration:config
                                                  delegate:self
                                             delegateQueue:nil];
}

- (void)dealloc {
  // Fix: previously called -cancel, which dispatch_asyncs a block capturing
  // self — from dealloc that resurrects a deallocating object (ARC UB).
  // Cancel synchronously instead; no block can be pending on networkQueue
  // here, because a pending block would still retain self.
  for (NSURLSessionTask *task in _activeTasks.allValues) {
    [task cancel];
  }
  [_activeTasks removeAllObjects];
  [_urlSession invalidateAndCancel];
}

#pragma mark - Public Methods

/// Requests synthesis for one text segment, routing to URL or streaming
/// mode based on expectedPayloadType. No-ops on empty input.
- (void)requestTTSForText:(NSString *)text segmentId:(NSString *)segmentId {
  if (!text || text.length == 0 || !segmentId) {
    return;
  }
  dispatch_async(self.networkQueue, ^{
    self.requesting = YES;
    switch (self.expectedPayloadType) {
    case TTSPayloadTypeURL:
      [self requestURLMode:text segmentId:segmentId];
      break;
    case TTSPayloadTypePCMChunk:
    case TTSPayloadTypeAACChunk:
    case TTSPayloadTypeOpusChunk:
      [self requestStreamMode:text segmentId:segmentId];
      break;
    }
  });
}

/// Cancels every in-flight task and clears the requesting flag.
- (void)cancel {
  dispatch_async(self.networkQueue, ^{
    for (NSURLSessionTask *task in self.activeTasks.allValues) {
      [task cancel];
    }
    [self.activeTasks removeAllObjects];
    self.requesting = NO;
  });
}

#pragma mark - URL Mode (Mode A)

/// Mode A: POST the text as JSON, expect {"audioUrl": "..."} back.
- (void)requestURLMode:(NSString *)text segmentId:(NSString *)segmentId {
  NSURL *url = [NSURL URLWithString:self.serverURL];
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
  request.HTTPMethod = @"POST";
  [request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
  NSDictionary *body = @{
    @"text" : text,
    @"segmentId" : segmentId,
    @"voiceId" : self.voiceId ?: @"JBFqnCBsd6RMkjVDRZzb",
    @"languageCode" : self.languageCode ?: @"zh",
    @"format" : @"mp3" // or m4a
  };
  NSError *jsonError = nil;
  NSData *jsonData = [NSJSONSerialization dataWithJSONObject:body
                                                     options:0
                                                       error:&jsonError];
  if (jsonError) {
    [self reportError:jsonError];
    return;
  }
  request.HTTPBody = jsonData;
  __weak typeof(self) weakSelf = self;
  NSURLSessionDataTask *task = [self.urlSession
      dataTaskWithRequest:request
        completionHandler:^(NSData *_Nullable data,
                            NSURLResponse *_Nullable response,
                            NSError *_Nullable error) {
          __strong typeof(weakSelf) strongSelf = weakSelf;
          if (!strongSelf)
            return;
          dispatch_async(strongSelf.networkQueue, ^{
            [strongSelf.activeTasks removeObjectForKey:segmentId];
            if (strongSelf.activeTasks.count == 0) {
              // Fix: `requesting` was previously never cleared once the
              // request finished successfully.
              strongSelf.requesting = NO;
            }
            if (error) {
              if (error.code != NSURLErrorCancelled) {
                [strongSelf reportError:error];
              }
              return;
            }
            if (data.length == 0) {
              // Fix: a nil/empty body would have crashed
              // NSJSONSerialization below.
              NSError *emptyError = [NSError
                  errorWithDomain:NSURLErrorDomain
                             code:NSURLErrorZeroByteResource
                         userInfo:@{
                           NSLocalizedDescriptionKey :
                               @"Empty TTS response body"
                         }];
              [strongSelf reportError:emptyError];
              return;
            }
            // Parse the JSON envelope.
            NSError *parseError = nil;
            NSDictionary *json =
                [NSJSONSerialization JSONObjectWithData:data
                                                options:0
                                                  error:&parseError];
            if (parseError) {
              [strongSelf reportError:parseError];
              return;
            }
            NSString *audioURLString = json[@"audioUrl"];
            if (audioURLString) {
              NSURL *audioURL = [NSURL URLWithString:audioURLString];
              dispatch_async(dispatch_get_main_queue(), ^{
                if ([strongSelf.delegate
                        respondsToSelector:@selector(ttsClientDidReceiveURL:
                                                                 segmentId:)]) {
                  [strongSelf.delegate ttsClientDidReceiveURL:audioURL
                                                    segmentId:segmentId];
                }
                if ([strongSelf.delegate respondsToSelector:@selector
                                         (ttsClientDidFinishSegment:)]) {
                  [strongSelf.delegate ttsClientDidFinishSegment:segmentId];
                }
              });
            }
          });
        }];
  self.activeTasks[segmentId] = task;
  [task resume];
  NSLog(@"[TTSServiceClient] URL mode request for segment: %@", segmentId);
}

#pragma mark - Stream Mode (Mode B/C/D)

/// Modes B/C/D: open a WebSocket, send the request envelope, then pump the
/// receive loop until the server reports "done".
- (void)requestStreamMode:(NSString *)text segmentId:(NSString *)segmentId {
  // Derive the WebSocket endpoint from the HTTP base URL.
  NSString *wsURL =
      [self.serverURL stringByReplacingOccurrencesOfString:@"https://"
                                                withString:@"wss://"];
  wsURL = [wsURL stringByReplacingOccurrencesOfString:@"http://"
                                           withString:@"ws://"];
  wsURL = [wsURL stringByAppendingString:@"/stream"];
  NSURL *url = [NSURL URLWithString:wsURL];
  NSURLSessionWebSocketTask *wsTask =
      [self.urlSession webSocketTaskWithURL:url];
  self.activeTasks[segmentId] = wsTask;
  [wsTask resume];
  // Request envelope.
  NSDictionary *requestDict = @{
    @"text" : text,
    @"segmentId" : segmentId,
    @"voiceId" : self.voiceId ?: @"JBFqnCBsd6RMkjVDRZzb",
    @"languageCode" : self.languageCode ?: @"zh",
    @"format" : [self formatStringForPayloadType:self.expectedPayloadType]
  };
  NSError *jsonError = nil;
  NSData *jsonData = [NSJSONSerialization dataWithJSONObject:requestDict
                                                     options:0
                                                       error:&jsonError];
  if (jsonError) {
    [self reportError:jsonError];
    return;
  }
  NSString *jsonString = [[NSString alloc] initWithData:jsonData
                                               encoding:NSUTF8StringEncoding];
  NSURLSessionWebSocketMessage *message =
      [[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
  __weak typeof(self) weakSelf = self;
  [wsTask sendMessage:message
      completionHandler:^(NSError *_Nullable error) {
        if (error) {
          [weakSelf reportError:error];
        } else {
          [weakSelf receiveStreamMessage:wsTask segmentId:segmentId];
        }
      }];
  NSLog(@"[TTSServiceClient] Stream mode request for segment: %@", segmentId);
}

/// One step of the receive loop: binary frames are audio chunks, text
/// frames are JSON control messages ({"type":"done"} ends the stream).
- (void)receiveStreamMessage:(NSURLSessionWebSocketTask *)wsTask
                   segmentId:(NSString *)segmentId {
  __weak typeof(self) weakSelf = self;
  [wsTask receiveMessageWithCompletionHandler:^(
              NSURLSessionWebSocketMessage *_Nullable message,
              NSError *_Nullable error) {
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf)
      return;
    if (error) {
      // 57 == ENOTCONN: socket already closed (e.g. after cancel) — not
      // worth surfacing, same as an explicit cancellation.
      if (error.code != NSURLErrorCancelled && error.code != 57) {
        [strongSelf reportError:error];
      }
      return;
    }
    if (message.type == NSURLSessionWebSocketMessageTypeData) {
      // Binary frame: forward the audio chunk to the delegate.
      dispatch_async(dispatch_get_main_queue(), ^{
        if ([strongSelf.delegate respondsToSelector:@selector
                                 (ttsClientDidReceiveAudioChunk:
                                                   payloadType:segmentId:)]) {
          [strongSelf.delegate
              ttsClientDidReceiveAudioChunk:message.data
                                payloadType:strongSelf.expectedPayloadType
                                  segmentId:segmentId];
        }
      });
      // Keep pumping.
      [strongSelf receiveStreamMessage:wsTask segmentId:segmentId];
    } else if (message.type == NSURLSessionWebSocketMessageTypeString) {
      // Text frame: JSON control message (unparseable frames fall through
      // to the else branch and the loop continues).
      NSData *data = [message.string dataUsingEncoding:NSUTF8StringEncoding];
      NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data
                                                           options:0
                                                             error:nil];
      if ([json[@"type"] isEqualToString:@"done"]) {
        dispatch_async(strongSelf.networkQueue, ^{
          [strongSelf.activeTasks removeObjectForKey:segmentId];
          if (strongSelf.activeTasks.count == 0) {
            // Fix: `requesting` was previously never cleared on successful
            // stream completion.
            strongSelf.requesting = NO;
          }
        });
        dispatch_async(dispatch_get_main_queue(), ^{
          if ([strongSelf.delegate
                  respondsToSelector:@selector(ttsClientDidFinishSegment:)]) {
            [strongSelf.delegate ttsClientDidFinishSegment:segmentId];
          }
        });
      } else {
        // Unknown control message; keep listening.
        [strongSelf receiveStreamMessage:wsTask segmentId:segmentId];
      }
    }
  }];
}

/// Maps a payload type to the wire "format" string.
- (NSString *)formatStringForPayloadType:(TTSPayloadType)type {
  switch (type) {
  case TTSPayloadTypePCMChunk:
    return @"pcm";
  case TTSPayloadTypeAACChunk:
    return @"aac";
  case TTSPayloadTypeOpusChunk:
    return @"opus";
  default:
    return @"mp3";
  }
}

#pragma mark - Error Reporting

/// Clears the requesting flag and reports the failure to the delegate on
/// the main queue.
- (void)reportError:(NSError *)error {
  self.requesting = NO;
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector(ttsClientDidFail:)]) {
      [self.delegate ttsClientDidFail:error];
    }
  });
}
@end

View File

@@ -1,53 +0,0 @@
//
//  VoiceChatStreamingManager.h
//  keyBoard
//
//  Created by Mac on 2026/1/21.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Callbacks covering connection lifecycle, turn-taking, transcripts, LLM
/// tokens and synthesized audio. Delivered on the main queue.
@protocol VoiceChatStreamingManagerDelegate <NSObject>
@optional
- (void)voiceChatStreamingManagerDidConnect;
- (void)voiceChatStreamingManagerDidDisconnect:(NSError *_Nullable)error;
- (void)voiceChatStreamingManagerDidStartSession:(NSString *)sessionId;
- (void)voiceChatStreamingManagerDidStartTurn:(NSInteger)turnIndex;
/// Server tentatively detected end of the user's turn.
- (void)voiceChatStreamingManagerDidReceiveEagerEndOfTurnWithTranscript:(NSString *)text
                                                             confidence:(double)confidence;
- (void)voiceChatStreamingManagerDidResumeTurn;
/// Microphone level update (for UI metering).
- (void)voiceChatStreamingManagerDidUpdateRMS:(float)rms;
- (void)voiceChatStreamingManagerDidReceiveInterimTranscript:(NSString *)text;
- (void)voiceChatStreamingManagerDidReceiveFinalTranscript:(NSString *)text;
- (void)voiceChatStreamingManagerDidReceiveLLMStart;
- (void)voiceChatStreamingManagerDidReceiveLLMToken:(NSString *)token;
/// One chunk of synthesized reply audio.
- (void)voiceChatStreamingManagerDidReceiveAudioChunk:(NSData *)audioData;
- (void)voiceChatStreamingManagerDidCompleteWithTranscript:(NSString *)transcript
                                                aiResponse:(NSString *)aiResponse;
- (void)voiceChatStreamingManagerDidFail:(NSError *)error;
@end
/// Manager for realtime recording and streaming.
@interface VoiceChatStreamingManager : NSObject
@property(nonatomic, weak) id<VoiceChatStreamingManagerDelegate> delegate;
/// Base WebSocket URL, e.g. wss://api.yourdomain.com/api/ws/chat
@property(nonatomic, copy) NSString *serverURL;
@property(nonatomic, assign, readonly, getter=isStreaming) BOOL streaming;
@property(nonatomic, copy, readonly, nullable) NSString *sessionId;
/// Connects and starts a session once microphone permission and the audio
/// session are in place.
- (void)startWithToken:(NSString *)token
              language:(nullable NSString *)language
               voiceId:(nullable NSString *)voiceId;
/// Stops capturing and asks the server to finalize the current turn.
- (void)stopAndFinalize;
/// Aborts the current session without finalizing.
- (void)cancel;
/// Tears down the socket and the audio session.
- (void)disconnect;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,380 +0,0 @@
//
// VoiceChatStreamingManager.m
// keyBoard
//
// Created by Mac on 2026/1/21.
//
#import "VoiceChatStreamingManager.h"
#import "AudioCaptureManager.h"
#import "AudioSessionManager.h"
#import "VoiceChatWebSocketClient.h"
// Error domain for manager-level NSErrors.
static NSString *const kVoiceChatStreamingManagerErrorDomain =
    @"VoiceChatStreamingManager";

/// Glue between the audio stack (session + capture) and the realtime
/// WebSocket client. Mutable state (streaming, sessionId, pending
/// credentials) is mutated on the serial stateQueue; delegate callbacks are
/// forwarded on the main queue.
@interface VoiceChatStreamingManager () <AudioSessionManagerDelegate,
                                         AudioCaptureManagerDelegate,
                                         VoiceChatWebSocketClientDelegate>
// Shared AVAudioSession wrapper (permission, configuration, activation).
@property(nonatomic, strong) AudioSessionManager *audioSession;
// Microphone capture; emits PCM frames and RMS levels.
@property(nonatomic, strong) AudioCaptureManager *audioCapture;
// Realtime WebSocket transport.
@property(nonatomic, strong) VoiceChatWebSocketClient *webSocketClient;
// Serial queue guarding the state below.
@property(nonatomic, strong) dispatch_queue_t stateQueue;
@property(nonatomic, assign) BOOL streaming;
@property(nonatomic, copy) NSString *sessionId;
// Captured at -startWithToken:... and used once the socket connects.
@property(nonatomic, copy) NSString *pendingToken;
@property(nonatomic, copy) NSString *pendingLanguage;
@property(nonatomic, copy) NSString *pendingVoiceId;
@end

@implementation VoiceChatStreamingManager

- (instancetype)init {
  self = [super init];
  if (self) {
    _stateQueue = dispatch_queue_create("com.keyboard.aitalk.voicechat.manager",
                                        DISPATCH_QUEUE_SERIAL);
    _audioSession = [AudioSessionManager sharedManager];
    _audioSession.delegate = self;
    _audioCapture = [[AudioCaptureManager alloc] init];
    _audioCapture.delegate = self;
    _webSocketClient = [[VoiceChatWebSocketClient alloc] init];
    _webSocketClient.delegate = self;
    // NOTE(review): hard-coded development LAN endpoint (the token is
    // appended as a query parameter) — move to configuration before release.
    _serverURL = @"ws://192.168.2.21:7529/api/ws/chat?token=";
    _webSocketClient.serverURL = _serverURL;
  }
  return self;
}

- (void)dealloc {
  // Synchronous teardown, intentionally not dispatched to stateQueue: a
  // block enqueued from dealloc would capture a deallocating self.
  [self disconnectInternal];
}

/// Custom setter so a new base URL propagates to the WebSocket client.
- (void)setServerURL:(NSString *)serverURL {
  _serverURL = [serverURL copy];
  self.webSocketClient.serverURL = _serverURL;
}

#pragma mark - Public Methods

/// Stores credentials/config and kicks off the connect sequence on
/// stateQueue. Audio sending stays disabled until the server confirms a
/// session (see -voiceChatClientDidStartSession:).
- (void)startWithToken:(NSString *)token
              language:(nullable NSString *)language
               voiceId:(nullable NSString *)voiceId {
  dispatch_async(self.stateQueue, ^{
    self.pendingToken = token ?: @"";
    self.pendingLanguage = language ?: @"";
    self.pendingVoiceId = voiceId ?: @"";
    [self.webSocketClient disableAudioSending];
    [self startInternal];
  });
}

/// Stops capture, then signals end-of-audio so the server finalizes the
/// current turn. Order matters: sending is disabled before endAudio.
- (void)stopAndFinalize {
  dispatch_async(self.stateQueue, ^{
    if (self.streaming) {
      [self.audioCapture stopCapture];
      self.streaming = NO;
    }
    [self.webSocketClient disableAudioSending];
    [self.webSocketClient endAudio];
  });
}

/// Aborts the current session without finalizing.
- (void)cancel {
  dispatch_async(self.stateQueue, ^{
    if (self.streaming) {
      [self.audioCapture stopCapture];
      self.streaming = NO;
    }
    [self.webSocketClient disableAudioSending];
    [self.webSocketClient cancel];
    self.sessionId = nil;
  });
}

/// Full teardown (socket + audio session), serialized on stateQueue.
- (void)disconnect {
  dispatch_async(self.stateQueue, ^{
    [self disconnectInternal];
  });
}

// Shared teardown body; called from -disconnect (on stateQueue) and from
// -dealloc (inline).
- (void)disconnectInternal {
  if (self.streaming) {
    [self.audioCapture stopCapture];
    self.streaming = NO;
  }
  [self.webSocketClient disableAudioSending];
  [self.webSocketClient disconnect];
  [self.audioSession deactivateSession];
  self.sessionId = nil;
}

#pragma mark - Private Methods

// Validates preconditions (token, microphone permission, audio session,
// server URL) then opens the WebSocket. Re-enters itself asynchronously
// after the permission prompt. Must run on stateQueue.
- (void)startInternal {
  if (self.pendingToken.length == 0) {
    NSLog(@"[VoiceChatStreamingManager] Start failed: token is empty");
    [self reportErrorWithMessage:@"Token is required"];
    return;
  }
  if (![self.audioSession hasMicrophonePermission]) {
    __weak typeof(self) weakSelf = self;
    [self.audioSession requestMicrophonePermission:^(BOOL granted) {
      __strong typeof(weakSelf) strongSelf = weakSelf;
      if (!strongSelf) {
        return;
      }
      if (!granted) {
        [strongSelf reportErrorWithMessage:@"Microphone permission denied"];
        return;
      }
      // Permission granted: retry the whole start sequence.
      dispatch_async(strongSelf.stateQueue, ^{
        [strongSelf startInternal];
      });
    }];
    return;
  }
  NSError *error = nil;
  if (![self.audioSession configureForConversation:&error]) {
    [self reportError:error];
    return;
  }
  if (![self.audioSession activateSession:&error]) {
    [self reportError:error];
    return;
  }
  if (self.serverURL.length == 0) {
    NSLog(@"[VoiceChatStreamingManager] Start failed: server URL is empty");
    [self reportErrorWithMessage:@"Server URL is required"];
    return;
  }
  NSLog(@"[VoiceChatStreamingManager] Start streaming, server: %@",
        self.serverURL);
  self.webSocketClient.serverURL = self.serverURL;
  [self.webSocketClient connectWithToken:self.pendingToken];
}

// Forwards an error to the delegate on the main queue.
- (void)reportError:(NSError *)error {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidFail:)]) {
      [self.delegate voiceChatStreamingManagerDidFail:error];
    }
  });
}

// Convenience wrapper: builds an NSError in the manager's domain.
- (void)reportErrorWithMessage:(NSString *)message {
  NSError *error = [NSError errorWithDomain:kVoiceChatStreamingManagerErrorDomain
                                       code:-1
                                   userInfo:@{
                                     NSLocalizedDescriptionKey : message ?: @""
                                   }];
  [self reportError:error];
}

#pragma mark - AudioCaptureManagerDelegate

// Called for every captured PCM frame.
// NOTE(review): `streaming` is read here without hopping to stateQueue; a
// stray frame around start/stop is possible — confirm this is acceptable.
- (void)audioCaptureManagerDidOutputPCMFrame:(NSData *)pcmFrame {
  if (!self.streaming) {
    return;
  }
  [self.webSocketClient sendAudioPCMFrame:pcmFrame];
}

// Microphone level update, forwarded for UI metering.
- (void)audioCaptureManagerDidUpdateRMS:(float)rms {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidUpdateRMS:)]) {
      [self.delegate voiceChatStreamingManagerDidUpdateRMS:rms];
    }
  });
}

#pragma mark - AudioSessionManagerDelegate

// A system interruption (e.g. incoming call) aborts the session.
- (void)audioSessionManagerDidInterrupt:(KBAudioSessionInterruptionType)type {
  if (type == KBAudioSessionInterruptionTypeBegan) {
    [self cancel];
  }
}

- (void)audioSessionManagerMicrophonePermissionDenied {
  [self reportErrorWithMessage:@"Microphone permission denied"];
}

#pragma mark - VoiceChatWebSocketClientDelegate

// Socket is up: request a session with the pending config, then notify.
- (void)voiceChatClientDidConnect {
  dispatch_async(self.stateQueue, ^{
    [self.webSocketClient startSessionWithLanguage:self.pendingLanguage
                                           voiceId:self.pendingVoiceId];
  });
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidConnect)]) {
      [self.delegate voiceChatStreamingManagerDidConnect];
    }
  });
}

// Socket closed: stop capture, release the audio session, then notify.
- (void)voiceChatClientDidDisconnect:(NSError *_Nullable)error {
  dispatch_async(self.stateQueue, ^{
    if (self.streaming) {
      [self.audioCapture stopCapture];
      self.streaming = NO;
    }
    [self.audioSession deactivateSession];
    self.sessionId = nil;
  });
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidDisconnect:)]) {
      [self.delegate voiceChatStreamingManagerDidDisconnect:error];
    }
  });
}

// Server accepted the session: start microphone capture and open the audio
// gate; cancel the socket if capture fails to start.
- (void)voiceChatClientDidStartSession:(NSString *)sessionId {
  dispatch_async(self.stateQueue, ^{
    self.sessionId = sessionId;
    NSError *error = nil;
    if (![self.audioCapture startCapture:&error]) {
      [self reportError:error];
      [self.webSocketClient cancel];
      return;
    }
    self.streaming = YES;
    [self.webSocketClient enableAudioSending];
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatStreamingManagerDidStartSession:)]) {
        [self.delegate voiceChatStreamingManagerDidStartSession:sessionId];
      }
    });
  });
}

// The remaining client callbacks are 1:1 forwards to the manager delegate
// on the main queue.

- (void)voiceChatClientDidStartTurn:(NSInteger)turnIndex {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidStartTurn:)]) {
      [self.delegate voiceChatStreamingManagerDidStartTurn:turnIndex];
    }
  });
}

- (void)voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:(NSString *)text
                                                   confidence:(double)confidence {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector
            (voiceChatStreamingManagerDidReceiveEagerEndOfTurnWithTranscript:
                                                              confidence:)]) {
      [self.delegate
          voiceChatStreamingManagerDidReceiveEagerEndOfTurnWithTranscript:text
                                                               confidence:confidence];
    }
  });
}

- (void)voiceChatClientDidResumeTurn {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidResumeTurn)]) {
      [self.delegate voiceChatStreamingManagerDidResumeTurn];
    }
  });
}

- (void)voiceChatClientDidReceiveInterimTranscript:(NSString *)text {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidReceiveInterimTranscript:)]) {
      [self.delegate voiceChatStreamingManagerDidReceiveInterimTranscript:text];
    }
  });
}

- (void)voiceChatClientDidReceiveFinalTranscript:(NSString *)text {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidReceiveFinalTranscript:)]) {
      [self.delegate voiceChatStreamingManagerDidReceiveFinalTranscript:text];
    }
  });
}

- (void)voiceChatClientDidReceiveLLMStart {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidReceiveLLMStart)]) {
      [self.delegate voiceChatStreamingManagerDidReceiveLLMStart];
    }
  });
}

- (void)voiceChatClientDidReceiveLLMToken:(NSString *)token {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidReceiveLLMToken:)]) {
      [self.delegate voiceChatStreamingManagerDidReceiveLLMToken:token];
    }
  });
}

- (void)voiceChatClientDidReceiveAudioChunk:(NSData *)audioData {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidReceiveAudioChunk:)]) {
      [self.delegate voiceChatStreamingManagerDidReceiveAudioChunk:audioData];
    }
  });
}

- (void)voiceChatClientDidCompleteWithTranscript:(NSString *)transcript
                                      aiResponse:(NSString *)aiResponse {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (voiceChatStreamingManagerDidCompleteWithTranscript:
                                                               aiResponse:)]) {
      [self.delegate voiceChatStreamingManagerDidCompleteWithTranscript:transcript
                                                             aiResponse:aiResponse];
    }
  });
}

// Server-reported error: wrapped into the manager's NSError domain.
- (void)voiceChatClientDidReceiveErrorCode:(NSString *)code
                                   message:(NSString *)message {
  NSString *desc = message.length > 0 ? message : @"Server error";
  NSError *error = [NSError errorWithDomain:kVoiceChatStreamingManagerErrorDomain
                                       code:-2
                                   userInfo:@{
                                     NSLocalizedDescriptionKey : desc,
                                     @"code" : code ?: @""
                                   }];
  [self reportError:error];
}

- (void)voiceChatClientDidFail:(NSError *)error {
  [self reportError:error];
}
@end

View File

@@ -1,57 +0,0 @@
//
//  VoiceChatWebSocketClient.h
//  keyBoard
//
//  Created by Mac on 2026/1/21.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Transport-level callbacks: connection lifecycle, session/turn events,
/// transcripts, LLM tokens and synthesized audio chunks.
@protocol VoiceChatWebSocketClientDelegate <NSObject>
@optional
- (void)voiceChatClientDidConnect;
- (void)voiceChatClientDidDisconnect:(NSError *_Nullable)error;
- (void)voiceChatClientDidStartSession:(NSString *)sessionId;
- (void)voiceChatClientDidStartTurn:(NSInteger)turnIndex;
/// Server tentatively detected the end of the user's turn.
- (void)voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:(NSString *)text
                                                   confidence:(double)confidence;
- (void)voiceChatClientDidResumeTurn;
- (void)voiceChatClientDidReceiveInterimTranscript:(NSString *)text;
- (void)voiceChatClientDidReceiveFinalTranscript:(NSString *)text;
- (void)voiceChatClientDidReceiveLLMStart;
- (void)voiceChatClientDidReceiveLLMToken:(NSString *)token;
/// One chunk of synthesized reply audio.
- (void)voiceChatClientDidReceiveAudioChunk:(NSData *)audioData;
- (void)voiceChatClientDidCompleteWithTranscript:(NSString *)transcript
                                      aiResponse:(NSString *)aiResponse;
/// Structured error reported by the server.
- (void)voiceChatClientDidReceiveErrorCode:(NSString *)code
                                   message:(NSString *)message;
- (void)voiceChatClientDidFail:(NSError *)error;
@end
/// WebSocket client for realtime voice chat.
@interface VoiceChatWebSocketClient : NSObject
@property(nonatomic, weak) id<VoiceChatWebSocketClientDelegate> delegate;
/// Base WebSocket URL, e.g. wss://api.yourdomain.com/api/ws/chat
@property(nonatomic, copy) NSString *serverURL;
@property(nonatomic, assign, readonly, getter=isConnected) BOOL connected;
@property(nonatomic, copy, readonly, nullable) NSString *sessionId;
/// Opens the socket (tearing down any previous connection first).
- (void)connectWithToken:(NSString *)token;
/// Closes the socket.
- (void)disconnect;
/// Sends the session_start envelope with optional config.
- (void)startSessionWithLanguage:(nullable NSString *)language
                         voiceId:(nullable NSString *)voiceId;
/// Opens the gate that allows PCM frames to be forwarded.
- (void)enableAudioSending;
/// Closes the gate; frames arriving meanwhile are dropped.
- (void)disableAudioSending;
/// Forwards one PCM frame (no-op while the gate is closed).
- (void)sendAudioPCMFrame:(NSData *)pcmFrame;
/// Signals end-of-audio so the server can finalize the turn.
- (void)endAudio;
/// Aborts the current session.
- (void)cancel;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,459 +0,0 @@
//
// VoiceChatWebSocketClient.m
// keyBoard
//
// Created by Mac on 2026/1/21.
//
#import "VoiceChatWebSocketClient.h"
// Error domain for client-level NSErrors.
static NSString *const kVoiceChatWebSocketClientErrorDomain =
    @"VoiceChatWebSocketClient";

/// Private state; mutable members are owned by the serial networkQueue.
@interface VoiceChatWebSocketClient () <NSURLSessionWebSocketDelegate>
// Session backing the WebSocket task.
// NOTE(review): NSURLSession retains its delegate until invalidated —
// confirm -disconnectInternal (outside this excerpt) invalidates the
// session, otherwise each connect leaks a session that retains self.
@property(nonatomic, strong) NSURLSession *urlSession;
@property(nonatomic, strong) NSURLSessionWebSocketTask *webSocketTask;
// Serial queue owning connection state.
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL connected;
@property(nonatomic, copy) NSString *sessionId;
// Gate: PCM frames are forwarded only while YES.
@property(nonatomic, assign) BOOL audioSendingEnabled;
@end
@implementation VoiceChatWebSocketClient

/// Sets up the serial network queue and default configuration; audio
/// sending starts disabled until a session opens.
- (instancetype)init {
  if ((self = [super init])) {
    _audioSendingEnabled = NO;
    _serverURL = @"wss://api.yourdomain.com/api/ws/chat";
    _networkQueue = dispatch_queue_create("com.keyboard.aitalk.voicechat.ws",
                                          DISPATCH_QUEUE_SERIAL);
  }
  return self;
}
- (void)dealloc {
  // NOTE(review): -disconnectInternal is defined outside this excerpt —
  // confirm it tears down synchronously; dispatching async from dealloc
  // would capture a deallocating self (ARC undefined behavior).
  [self disconnectInternal];
}
#pragma mark - Public Methods
- (void)connectWithToken:(NSString *)token {
dispatch_async(self.networkQueue, ^{
[self disconnectInternal];
NSURL *url = [self buildURLWithToken:token];
if (!url) {
[self reportErrorWithMessage:@"Invalid server URL"];
return;
}
NSLog(@"[VoiceChatWebSocketClient] Connecting: %@", url.absoluteString);
NSURLSessionConfiguration *config =
[NSURLSessionConfiguration defaultSessionConfiguration];
config.timeoutIntervalForRequest = 30;
config.timeoutIntervalForResource = 300;
self.urlSession = [NSURLSession sessionWithConfiguration:config
delegate:self
delegateQueue:nil];
self.webSocketTask = [self.urlSession webSocketTaskWithURL:url];
[self.webSocketTask resume];
[self receiveMessage];
});
}
- (void)disconnect {
dispatch_async(self.networkQueue, ^{
BOOL shouldNotify = self.webSocketTask != nil;
if (shouldNotify) {
NSLog(@"[VoiceChatWebSocketClient] Disconnect requested");
}
[self disconnectInternal];
if (shouldNotify) {
[self notifyDisconnect:nil];
}
});
}
- (void)startSessionWithLanguage:(nullable NSString *)language
voiceId:(nullable NSString *)voiceId {
NSMutableDictionary *message = [NSMutableDictionary dictionary];
message[@"type"] = @"session_start";
NSMutableDictionary *config = [NSMutableDictionary dictionary];
if (language.length > 0) {
config[@"language"] = language;
}
if (voiceId.length > 0) {
config[@"voice_id"] = voiceId;
}
if (config.count > 0) {
message[@"config"] = config;
}
NSLog(@"[VoiceChatWebSocketClient] Sending session_start: %@",
message);
[self sendJSON:message];
}
- (void)enableAudioSending {
dispatch_async(self.networkQueue, ^{
self.audioSendingEnabled = YES;
});
}
- (void)disableAudioSending {
dispatch_async(self.networkQueue, ^{
self.audioSendingEnabled = NO;
});
}
- (void)sendAudioPCMFrame:(NSData *)pcmFrame {
if (!self.connected || !self.webSocketTask || pcmFrame.length == 0) {
return;
}
dispatch_async(self.networkQueue, ^{
if (!self.audioSendingEnabled) {
return;
}
if (!self.connected || !self.webSocketTask) {
return;
}
NSURLSessionWebSocketMessage *message =
[[NSURLSessionWebSocketMessage alloc] initWithData:pcmFrame];
[self.webSocketTask
sendMessage:message
completionHandler:^(NSError *_Nullable error) {
if (error) {
[self reportError:error];
} else {
NSLog(@"[VoiceChatWebSocketClient] Sent audio frame: %lu bytes",
(unsigned long)pcmFrame.length);
}
}];
});
}
- (void)endAudio {
NSLog(@"[VoiceChatWebSocketClient] Sending audio_end");
[self sendJSON:@{ @"type" : @"audio_end" }];
}
- (void)cancel {
NSLog(@"[VoiceChatWebSocketClient] Sending cancel");
[self sendJSON:@{ @"type" : @"cancel" }];
}
#pragma mark - Private Methods
- (NSURL *)buildURLWithToken:(NSString *)token {
if (self.serverURL.length == 0) {
return nil;
}
NSURLComponents *components =
[NSURLComponents componentsWithString:self.serverURL];
if (!components) {
return nil;
}
if (token.length > 0) {
NSMutableArray<NSURLQueryItem *> *items =
components.queryItems.mutableCopy ?: [NSMutableArray array];
BOOL didReplace = NO;
for (NSUInteger i = 0; i < items.count; i++) {
NSURLQueryItem *item = items[i];
if ([item.name isEqualToString:@"token"]) {
items[i] = [NSURLQueryItem queryItemWithName:@"token" value:token];
didReplace = YES;
break;
}
}
if (!didReplace) {
[items addObject:[NSURLQueryItem queryItemWithName:@"token"
value:token]];
}
components.queryItems = items;
}
return components.URL;
}
- (void)sendJSON:(NSDictionary *)dict {
if (!self.webSocketTask) {
return;
}
NSError *jsonError = nil;
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:dict
options:0
error:&jsonError];
if (jsonError) {
[self reportError:jsonError];
return;
}
NSString *jsonString =
[[NSString alloc] initWithData:jsonData
encoding:NSUTF8StringEncoding];
if (!jsonString) {
[self reportErrorWithMessage:@"Failed to encode JSON message"];
return;
}
dispatch_async(self.networkQueue, ^{
NSURLSessionWebSocketMessage *message =
[[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
[self.webSocketTask
sendMessage:message
completionHandler:^(NSError *_Nullable error) {
if (error) {
[self reportError:error];
}
}];
});
}
- (void)receiveMessage {
if (!self.webSocketTask) {
return;
}
__weak typeof(self) weakSelf = self;
[self.webSocketTask receiveMessageWithCompletionHandler:^(
NSURLSessionWebSocketMessage *_Nullable message,
NSError *_Nullable error) {
__strong typeof(weakSelf) strongSelf = weakSelf;
if (!strongSelf) {
return;
}
if (error) {
if (error.code != NSURLErrorCancelled && error.code != 57) {
[strongSelf notifyDisconnect:error];
[strongSelf disconnectInternal];
}
return;
}
if (message.type == NSURLSessionWebSocketMessageTypeString) {
NSLog(@"[VoiceChatWebSocketClient] Received text: %@", message.string);
[strongSelf handleTextMessage:message.string];
} else if (message.type == NSURLSessionWebSocketMessageTypeData) {
NSLog(@"[VoiceChatWebSocketClient] Received binary: %lu bytes",
(unsigned long)message.data.length);
[strongSelf handleBinaryMessage:message.data];
}
[strongSelf receiveMessage];
}];
}
- (void)handleTextMessage:(NSString *)text {
if (text.length == 0) {
return;
}
NSData *data = [text dataUsingEncoding:NSUTF8StringEncoding];
if (!data) {
return;
}
NSError *jsonError = nil;
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data
options:0
error:&jsonError];
if (jsonError) {
[self reportError:jsonError];
return;
}
NSString *type = json[@"type"];
if (type.length == 0) {
return;
}
if ([type isEqualToString:@"session_started"]) {
NSString *sessionId = json[@"session_id"] ?: @"";
self.sessionId = sessionId;
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidStartSession:)]) {
[self.delegate voiceChatClientDidStartSession:sessionId];
}
});
} else if ([type isEqualToString:@"transcript_interim"]) {
NSString *transcript = json[@"text"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidReceiveInterimTranscript:)]) {
[self.delegate voiceChatClientDidReceiveInterimTranscript:transcript];
}
});
} else if ([type isEqualToString:@"transcript_final"]) {
NSString *transcript = json[@"text"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidReceiveFinalTranscript:)]) {
[self.delegate voiceChatClientDidReceiveFinalTranscript:transcript];
}
});
} else if ([type isEqualToString:@"turn_start"]) {
NSInteger turnIndex = [json[@"turn_index"] integerValue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidStartTurn:)]) {
[self.delegate voiceChatClientDidStartTurn:turnIndex];
}
});
} else if ([type isEqualToString:@"eager_eot"]) {
NSString *transcript = json[@"transcript"] ?: @"";
double confidence = [json[@"confidence"] doubleValue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:
confidence:)]) {
[self.delegate
voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:transcript
confidence:confidence];
}
});
} else if ([type isEqualToString:@"turn_resumed"]) {
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidResumeTurn)]) {
[self.delegate voiceChatClientDidResumeTurn];
}
});
} else if ([type isEqualToString:@"llm_start"]) {
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate
respondsToSelector:@selector(voiceChatClientDidReceiveLLMStart)]) {
[self.delegate voiceChatClientDidReceiveLLMStart];
}
});
} else if ([type isEqualToString:@"llm_token"]) {
NSString *token = json[@"token"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate
respondsToSelector:@selector(voiceChatClientDidReceiveLLMToken:)]) {
[self.delegate voiceChatClientDidReceiveLLMToken:token];
}
});
} else if ([type isEqualToString:@"complete"]) {
NSString *transcript = json[@"transcript"] ?: @"";
NSString *aiResponse = json[@"ai_response"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidCompleteWithTranscript:
aiResponse:)]) {
[self.delegate voiceChatClientDidCompleteWithTranscript:transcript
aiResponse:aiResponse];
}
});
} else if ([type isEqualToString:@"error"]) {
NSString *code = json[@"code"] ?: @"";
NSString *message = json[@"message"] ?: @"";
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidReceiveErrorCode:message:)]) {
[self.delegate voiceChatClientDidReceiveErrorCode:code
message:message];
}
});
}
}
- (void)handleBinaryMessage:(NSData *)data {
if (data.length == 0) {
return;
}
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate
respondsToSelector:@selector(voiceChatClientDidReceiveAudioChunk:)]) {
[self.delegate voiceChatClientDidReceiveAudioChunk:data];
}
});
}
- (void)disconnectInternal {
self.connected = NO;
self.sessionId = nil;
self.audioSendingEnabled = NO;
if (self.webSocketTask) {
[self.webSocketTask
cancelWithCloseCode:NSURLSessionWebSocketCloseCodeNormalClosure
reason:nil];
self.webSocketTask = nil;
}
if (self.urlSession) {
[self.urlSession invalidateAndCancel];
self.urlSession = nil;
}
}
- (void)reportError:(NSError *)error {
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(voiceChatClientDidFail:)]) {
[self.delegate voiceChatClientDidFail:error];
}
});
}
- (void)reportErrorWithMessage:(NSString *)message {
NSError *error = [NSError errorWithDomain:kVoiceChatWebSocketClientErrorDomain
code:-1
userInfo:@{
NSLocalizedDescriptionKey : message ?: @""
}];
[self reportError:error];
}
- (void)notifyDisconnect:(NSError *_Nullable)error {
self.connected = NO;
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector
(voiceChatClientDidDisconnect:)]) {
[self.delegate voiceChatClientDidDisconnect:error];
}
});
}
#pragma mark - NSURLSessionWebSocketDelegate
- (void)URLSession:(NSURLSession *)session
webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
didOpenWithProtocol:(NSString *)protocol {
self.connected = YES;
NSLog(@"[VoiceChatWebSocketClient] Connected");
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(voiceChatClientDidConnect)]) {
[self.delegate voiceChatClientDidConnect];
}
});
}
- (void)URLSession:(NSURLSession *)session
webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
didCloseWithCode:(NSURLSessionWebSocketCloseCode)closeCode
reason:(NSData *)reason {
if (!self.webSocketTask) {
return;
}
NSLog(@"[VoiceChatWebSocketClient] Closed with code: %ld",
(long)closeCode);
[self notifyDisconnect:nil];
[self disconnectInternal];
}
@end