Compare commits
10 Commits
e4223b3a4c
...
a83fd918a8
| Author | SHA1 | Date | |
|---|---|---|---|
| a83fd918a8 | |||
| 4168da618e | |||
| d2ffada83f | |||
| 76d387e08b | |||
| ea0df4fb19 | |||
| 02323fb5f1 | |||
| 3c71797b7b | |||
| 4c57f16058 | |||
| cb2e8467a7 | |||
| 4dfd6f5cbb |
8
.claude/settings.local.json
Normal file
8
.claude/settings.local.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"WebSearch",
|
||||
"Bash(git checkout:*)"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -30,6 +30,9 @@
|
||||
#import "UIImage+KBColor.h"
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
#import <SDWebImage/SDWebImage.h>
|
||||
#if DEBUG
|
||||
#import <mach/mach.h>
|
||||
#endif
|
||||
|
||||
// #import "KBLog.h"
|
||||
|
||||
@@ -78,10 +81,6 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
KBFunctionView *functionView; // 功能面板视图(点击工具栏第0个时显示)
|
||||
@property(nonatomic, strong) KBSettingView *settingView; // 设置页
|
||||
@property(nonatomic, strong) UIImageView *bgImageView; // 背景图(在底层)
|
||||
@property(nonatomic, strong) UIImageView *personaAvatarImageView; // 语音模式下显示的 persona 小头像
|
||||
@property(nonatomic, strong) UIImageView *personaGrayImageView; // 语音模式下显示的 persona 小头像
|
||||
@property(nonatomic, strong) UIVisualEffectView *personaBlurView; // 语音模式下头像高斯模糊层
|
||||
|
||||
@property(nonatomic, strong) KBChatPanelView *chatPanelView;
|
||||
@property(nonatomic, strong) KBKeyboardSubscriptionView *subscriptionView;
|
||||
@property(nonatomic, strong) KBSuggestionEngine *suggestionEngine;
|
||||
@@ -96,19 +95,56 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
@property(nonatomic, strong) NSLayoutConstraint *kb_widthConstraint;
|
||||
@property(nonatomic, assign) CGFloat kb_lastPortraitWidth;
|
||||
@property(nonatomic, assign) CGFloat kb_lastKeyboardHeight;
|
||||
@property(nonatomic, strong) UIImage *kb_cachedGradientImage;
|
||||
@property(nonatomic, assign) CGSize kb_cachedGradientSize;
|
||||
@property(nonatomic, strong, nullable) CAGradientLayer *kb_defaultGradientLayer;
|
||||
@property(nonatomic, copy, nullable) NSString *kb_lastAppliedThemeKey;
|
||||
@property(nonatomic, strong) NSMutableArray<KBChatMessage *> *chatMessages;
|
||||
@property(nonatomic, strong) AVAudioPlayer *chatAudioPlayer;
|
||||
@property(nonatomic, assign) BOOL chatPanelVisible;
|
||||
@property(nonatomic, strong, nullable) id kb_fullAccessObserverToken;
|
||||
@property(nonatomic, strong, nullable) id kb_skinObserverToken;
|
||||
@end
|
||||
|
||||
#if DEBUG
|
||||
static NSInteger sKBKeyboardVCAliveCount = 0;
|
||||
|
||||
static uint64_t KBPhysFootprintBytes(void) {
|
||||
task_vm_info_data_t vmInfo;
|
||||
mach_msg_type_number_t count = TASK_VM_INFO_COUNT;
|
||||
kern_return_t kr = task_info(mach_task_self(), TASK_VM_INFO,
|
||||
(task_info_t)&vmInfo, &count);
|
||||
if (kr != KERN_SUCCESS) {
|
||||
return 0;
|
||||
}
|
||||
return (uint64_t)vmInfo.phys_footprint;
|
||||
}
|
||||
|
||||
static NSString *KBFormatMB(uint64_t bytes) {
|
||||
double mb = (double)bytes / 1024.0 / 1024.0;
|
||||
return [NSString stringWithFormat:@"%.1fMB", mb];
|
||||
}
|
||||
#endif
|
||||
|
||||
@implementation KeyboardViewController
|
||||
|
||||
{
|
||||
BOOL _kb_didTriggerLoginDeepLinkOnce;
|
||||
#if DEBUG
|
||||
BOOL _kb_debugDidCountAlive;
|
||||
#endif
|
||||
}
|
||||
|
||||
- (void)viewDidLoad {
|
||||
[super viewDidLoad];
|
||||
#if DEBUG
|
||||
if (!_kb_debugDidCountAlive) {
|
||||
_kb_debugDidCountAlive = YES;
|
||||
sKBKeyboardVCAliveCount += 1;
|
||||
}
|
||||
NSLog(@"[Keyboard] KeyboardViewController viewDidLoad alive=%ld self=%p mem=%@",
|
||||
(long)sKBKeyboardVCAliveCount, self, KBFormatMB(KBPhysFootprintBytes()));
|
||||
#endif
|
||||
// 撤销删除是“上一段删除操作”的临时状态;键盘被系统回收/重建或跨页面回来时应当清空,避免误显示。
|
||||
[[KBBackspaceUndoManager shared] registerNonClearAction];
|
||||
[self setupUI];
|
||||
@@ -118,7 +154,7 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
[KBHUD setContainerView:self.view];
|
||||
// 绑定完全访问管理器,便于统一感知和联动网络开关
|
||||
[[KBFullAccessManager shared] bindInputController:self];
|
||||
__unused id token = [[NSNotificationCenter defaultCenter]
|
||||
self.kb_fullAccessObserverToken = [[NSNotificationCenter defaultCenter]
|
||||
addObserverForName:KBFullAccessChangedNotification
|
||||
object:nil
|
||||
queue:[NSOperationQueue mainQueue]
|
||||
@@ -127,11 +163,16 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
}];
|
||||
|
||||
// 皮肤变化时,立即应用
|
||||
__unused id token2 = [[NSNotificationCenter defaultCenter]
|
||||
__weak typeof(self) weakSelf = self;
|
||||
self.kb_skinObserverToken = [[NSNotificationCenter defaultCenter]
|
||||
addObserverForName:KBSkinDidChangeNotification
|
||||
object:nil
|
||||
queue:[NSOperationQueue mainQueue]
|
||||
usingBlock:^(__unused NSNotification *_Nonnull note) {
|
||||
__strong typeof(weakSelf) self = weakSelf;
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
[self kb_applyTheme];
|
||||
}];
|
||||
[self kb_applyTheme];
|
||||
@@ -144,12 +185,38 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
[self kb_applyDefaultSkinIfNeeded];
|
||||
}
|
||||
|
||||
- (void)didReceiveMemoryWarning {
|
||||
[super didReceiveMemoryWarning];
|
||||
// 扩展进程内存上限较小:在系统发出内存警告时主动清理可重建的缓存,降低被系统杀死概率。
|
||||
self.kb_cachedGradientImage = nil;
|
||||
[self.kb_defaultGradientLayer removeFromSuperlayer];
|
||||
self.kb_defaultGradientLayer = nil;
|
||||
[[KBSkinManager shared] clearRuntimeImageCaches];
|
||||
[[SDImageCache sharedImageCache] clearMemory];
|
||||
}
|
||||
|
||||
- (void)viewWillAppear:(BOOL)animated {
|
||||
[super viewWillAppear:animated];
|
||||
// FIX: iOS 26 键盘闪烁问题 —— 恢复键盘正确高度
|
||||
// setupUI 中高度初始为 0(防止系统预渲染快照闪烁),此处恢复为实际键盘高度。
|
||||
// 此时系统已准备好键盘滑入动画,恢复高度后键盘将正常从底部滑入。
|
||||
CGFloat portraitWidth = [self kb_portraitWidth];
|
||||
CGFloat keyboardHeight = [self kb_keyboardHeightForWidth:portraitWidth];
|
||||
if (self.kb_heightConstraint) {
|
||||
self.kb_heightConstraint.constant = keyboardHeight;
|
||||
}
|
||||
// 进入/重新进入输入界面时,清理上一次会话残留的撤销状态与缓存,避免显示“撤销删除”但实际上已不可撤销。
|
||||
[[KBBackspaceUndoManager shared] registerNonClearAction];
|
||||
[[KBInputBufferManager shared] resetWithText:@""];
|
||||
[[KBLocalizationManager shared] reloadFromSharedStorageIfNeeded];
|
||||
// 键盘再次出现时,恢复 HUD 容器与主题(viewDidDisappear 里可能已清理图片/缓存)。
|
||||
[KBHUD setContainerView:self.view];
|
||||
[self kb_ensureKeyBoardMainViewIfNeeded];
|
||||
[self kb_applyTheme];
|
||||
#if DEBUG
|
||||
NSLog(@"[Keyboard] viewWillAppear self=%p mem=%@",
|
||||
self, KBFormatMB(KBPhysFootprintBytes()));
|
||||
#endif
|
||||
// 注意:微信/QQ 等宿主的 documentContext 可能是“截断窗口”,这里只更新
|
||||
// liveText,不要把它当作全文 manualSnapshot。
|
||||
[[KBInputBufferManager shared]
|
||||
@@ -162,8 +229,17 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
- (void)viewWillDisappear:(BOOL)animated {
|
||||
[super viewWillDisappear:animated];
|
||||
[[KBBackspaceUndoManager shared] registerNonClearAction];
|
||||
// 清理 persona 头像内存
|
||||
[self kb_hidePersonaAvatar];
|
||||
[self kb_releaseMemoryWhenKeyboardHidden];
|
||||
#if DEBUG
|
||||
NSLog(@"[Keyboard] viewWillDisappear self=%p mem=%@",
|
||||
self, KBFormatMB(KBPhysFootprintBytes()));
|
||||
#endif
|
||||
}
|
||||
|
||||
- (void)viewDidDisappear:(BOOL)animated {
|
||||
[super viewDidDisappear:animated];
|
||||
// 再兜底一次,防止某些宿主只触发 willDisappear 而未触发 didDisappear。
|
||||
[self kb_releaseMemoryWhenKeyboardHidden];
|
||||
}
|
||||
|
||||
- (void)traitCollectionDidChange:(UITraitCollection *)previousTraitCollection {
|
||||
@@ -171,6 +247,7 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
if (@available(iOS 13.0, *)) {
|
||||
if (previousTraitCollection.userInterfaceStyle !=
|
||||
self.traitCollection.userInterfaceStyle) {
|
||||
self.kb_cachedGradientImage = nil;
|
||||
[self kb_applyDefaultSkinIfNeeded];
|
||||
}
|
||||
}
|
||||
@@ -192,12 +269,17 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
CGFloat portraitWidth = [self kb_portraitWidth];
|
||||
CGFloat keyboardHeight = [self kb_keyboardHeightForWidth:portraitWidth];
|
||||
CGFloat keyboardBaseHeight = [self kb_keyboardBaseHeightForWidth:portraitWidth];
|
||||
CGFloat chatPanelHeight = [self kb_chatPanelHeightForWidth:portraitWidth];
|
||||
CGFloat screenWidth = CGRectGetWidth([UIScreen mainScreen].bounds);
|
||||
CGFloat outerVerticalInset = KBFit(4.0f);
|
||||
|
||||
// FIX: iOS 26 键盘闪烁问题
|
||||
// iOS 26 在键盘滑入动画开始前,会对 self.view 做一次离屏预渲染快照(非实时 view),
|
||||
// 该快照会短暂显示在屏幕中间。如果此时 view 已有完整高度和内容,用户就会看到
|
||||
// 键盘 UI 在屏幕中间闪现一帧,然后键盘才从底部正常滑入。
|
||||
// 解决方案:初始高度设为 0,让系统快照时无内容可渲染;
|
||||
// 在 viewWillAppear: 中恢复正确高度,此时系统已准备好滑入动画。
|
||||
// (iOS 18 及更早版本无此预渲染机制,不受影响)
|
||||
NSLayoutConstraint *h =
|
||||
[self.view.heightAnchor constraintEqualToConstant:keyboardHeight];
|
||||
[self.view.heightAnchor constraintEqualToConstant:0];
|
||||
NSLayoutConstraint *w =
|
||||
[self.view.widthAnchor constraintEqualToConstant:screenWidth];
|
||||
self.kb_heightConstraint = h;
|
||||
@@ -227,12 +309,6 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
[self.bgImageView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.edges.equalTo(self.contentView);
|
||||
}];
|
||||
// 预置功能面板(默认隐藏),与键盘区域共享相同布局
|
||||
self.functionView.hidden = YES;
|
||||
[self.contentView addSubview:self.functionView];
|
||||
[self.functionView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.edges.equalTo(self.contentView);
|
||||
}];
|
||||
|
||||
[self.contentView addSubview:self.keyBoardMainView];
|
||||
[self.keyBoardMainView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
@@ -242,14 +318,8 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
make.height.mas_equalTo(keyboardBaseHeight);
|
||||
}];
|
||||
|
||||
[self.contentView addSubview:self.chatPanelView];
|
||||
[self.chatPanelView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.left.right.equalTo(self.contentView);
|
||||
make.bottom.equalTo(self.keyBoardMainView.mas_top);
|
||||
self.chatPanelHeightConstraint =
|
||||
make.height.mas_equalTo(chatPanelHeight);
|
||||
}];
|
||||
self.chatPanelView.hidden = YES;
|
||||
// 初始隐藏,避免布局完成前闪烁
|
||||
self.contentView.hidden = YES;
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
@@ -389,8 +459,14 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
// 简单显隐切换,复用相同的布局区域
|
||||
if (show) {
|
||||
[self showChatPanel:NO];
|
||||
[self kb_ensureFunctionViewIfNeeded];
|
||||
}
|
||||
if (_functionView) {
|
||||
_functionView.hidden = !show;
|
||||
} else if (show) {
|
||||
// ensure 后按理已存在;这里兜底一次,避免异常情况下状态不一致
|
||||
self.functionView.hidden = NO;
|
||||
}
|
||||
self.functionView.hidden = !show;
|
||||
self.keyBoardMainView.hidden = show;
|
||||
|
||||
if (show) {
|
||||
@@ -410,7 +486,9 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
|
||||
// 可选:把当前显示的视图置顶,避免层级遮挡
|
||||
if (show) {
|
||||
[self.contentView bringSubviewToFront:self.functionView];
|
||||
if (_functionView) {
|
||||
[self.contentView bringSubviewToFront:_functionView];
|
||||
}
|
||||
} else {
|
||||
[self.contentView bringSubviewToFront:self.keyBoardMainView];
|
||||
}
|
||||
@@ -425,19 +503,18 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
pageId:@"keyboard_settings"
|
||||
extra:nil
|
||||
completion:nil];
|
||||
// if (!self.settingView) {
|
||||
self.settingView = [[KBSettingView alloc] init];
|
||||
self.settingView.hidden = YES;
|
||||
[self.contentView addSubview:self.settingView];
|
||||
[self.settingView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
// 与键盘主视图完全等同的区域,保证高度、宽度一致
|
||||
make.edges.equalTo(self.contentView);
|
||||
}];
|
||||
[self.settingView.backButton addTarget:self
|
||||
action:@selector(onTapSettingsBack)
|
||||
forControlEvents:UIControlEventTouchUpInside];
|
||||
// }
|
||||
[self.contentView bringSubviewToFront:self.settingView];
|
||||
KBSettingView *settingView = self.settingView;
|
||||
if (!settingView.superview) {
|
||||
settingView.hidden = YES;
|
||||
[self.contentView addSubview:settingView];
|
||||
[settingView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.edges.equalTo(self.contentView);
|
||||
}];
|
||||
[settingView.backButton addTarget:self
|
||||
action:@selector(onTapSettingsBack)
|
||||
forControlEvents:UIControlEventTouchUpInside];
|
||||
}
|
||||
[self.contentView bringSubviewToFront:settingView];
|
||||
// 以 keyBoardMainView 的实际宽度为准,避免首次添加时 self.view 宽度尚未计算
|
||||
[self.contentView layoutIfNeeded];
|
||||
CGFloat w = CGRectGetWidth(self.keyBoardMainView.bounds);
|
||||
@@ -447,17 +524,18 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
if (w <= 0) {
|
||||
w = [self kb_portraitWidth];
|
||||
}
|
||||
self.settingView.transform = CGAffineTransformMakeTranslation(w, 0);
|
||||
self.settingView.hidden = NO;
|
||||
settingView.transform = CGAffineTransformMakeTranslation(w, 0);
|
||||
settingView.hidden = NO;
|
||||
[UIView animateWithDuration:0.25
|
||||
delay:0
|
||||
options:UIViewAnimationOptionCurveEaseOut
|
||||
animations:^{
|
||||
self.settingView.transform = CGAffineTransformIdentity;
|
||||
settingView.transform = CGAffineTransformIdentity;
|
||||
}
|
||||
completion:nil];
|
||||
} else {
|
||||
if (!self.settingView || self.settingView.hidden)
|
||||
KBSettingView *settingView = self.settingView;
|
||||
if (!settingView.superview || settingView.hidden)
|
||||
return;
|
||||
CGFloat w = CGRectGetWidth(self.keyBoardMainView.bounds);
|
||||
if (w <= 0) {
|
||||
@@ -470,10 +548,10 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
delay:0
|
||||
options:UIViewAnimationOptionCurveEaseIn
|
||||
animations:^{
|
||||
self.settingView.transform = CGAffineTransformMakeTranslation(w, 0);
|
||||
settingView.transform = CGAffineTransformMakeTranslation(w, 0);
|
||||
}
|
||||
completion:^(BOOL finished) {
|
||||
self.settingView.hidden = YES;
|
||||
settingView.hidden = YES;
|
||||
}];
|
||||
}
|
||||
}
|
||||
@@ -485,10 +563,13 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
}
|
||||
self.chatPanelVisible = show;
|
||||
if (show) {
|
||||
[self kb_ensureChatPanelViewIfNeeded];
|
||||
self.chatPanelView.hidden = NO;
|
||||
self.chatPanelView.alpha = 0.0;
|
||||
[self.contentView bringSubviewToFront:self.chatPanelView];
|
||||
self.functionView.hidden = YES;
|
||||
if (_functionView) {
|
||||
_functionView.hidden = YES;
|
||||
}
|
||||
[self hideSubscriptionPanel];
|
||||
[self showSettingView:NO];
|
||||
[UIView animateWithDuration:0.2
|
||||
@@ -499,6 +580,11 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
}
|
||||
completion:nil];
|
||||
} else {
|
||||
// 从未创建过聊天面板时,直接返回,避免 show/hide 触发额外内存分配
|
||||
if (!_chatPanelView) {
|
||||
[self kb_updateKeyboardLayoutIfNeeded];
|
||||
return;
|
||||
}
|
||||
[UIView animateWithDuration:0.18
|
||||
delay:0
|
||||
options:UIViewAnimationOptionCurveEaseIn
|
||||
@@ -512,6 +598,114 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
[self kb_updateKeyboardLayoutIfNeeded];
|
||||
}
|
||||
|
||||
// 延迟创建:仅在用户真正打开功能面板时才创建/布局,降低默认内存占用。
|
||||
- (void)kb_ensureFunctionViewIfNeeded {
|
||||
if (_functionView && _functionView.superview) {
|
||||
return;
|
||||
}
|
||||
KBFunctionView *v = self.functionView;
|
||||
if (!v.superview) {
|
||||
v.hidden = YES;
|
||||
[self.contentView addSubview:v];
|
||||
[v mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.edges.equalTo(self.contentView);
|
||||
}];
|
||||
}
|
||||
}
|
||||
|
||||
// 延迟创建:仅在用户打开聊天面板时才创建/布局。
|
||||
- (void)kb_ensureChatPanelViewIfNeeded {
|
||||
if (_chatPanelView && _chatPanelView.superview) {
|
||||
return;
|
||||
}
|
||||
CGFloat portraitWidth = [self kb_portraitWidth];
|
||||
CGFloat chatPanelHeight = [self kb_chatPanelHeightForWidth:portraitWidth];
|
||||
KBChatPanelView *v = self.chatPanelView;
|
||||
if (!v.superview) {
|
||||
[self.contentView addSubview:v];
|
||||
[v mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.left.right.equalTo(self.contentView);
|
||||
make.bottom.equalTo(self.keyBoardMainView.mas_top);
|
||||
self.chatPanelHeightConstraint =
|
||||
make.height.mas_equalTo(chatPanelHeight);
|
||||
}];
|
||||
v.hidden = YES;
|
||||
}
|
||||
}
|
||||
|
||||
// 延迟创建:键盘主面板(按键区)在隐藏时会被释放;再次显示时需要重建。
|
||||
- (void)kb_ensureKeyBoardMainViewIfNeeded {
|
||||
if (_keyBoardMainView && _keyBoardMainView.superview) {
|
||||
return;
|
||||
}
|
||||
CGFloat portraitWidth = [self kb_portraitWidth];
|
||||
CGFloat keyboardBaseHeight =
|
||||
[self kb_keyboardBaseHeightForWidth:portraitWidth];
|
||||
KBKeyBoardMainView *v = self.keyBoardMainView;
|
||||
if (!v.superview) {
|
||||
[self.contentView addSubview:v];
|
||||
[v mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.left.right.equalTo(self.contentView);
|
||||
make.bottom.equalTo(self.contentView);
|
||||
self.keyBoardMainHeightConstraint =
|
||||
make.height.mas_equalTo(keyboardBaseHeight);
|
||||
}];
|
||||
}
|
||||
[self.contentView bringSubviewToFront:v];
|
||||
}
|
||||
|
||||
// 键盘隐藏时释放可重建资源(背景图/缓存/非必需面板),降低扩展内存峰值。
|
||||
- (void)kb_releaseMemoryWhenKeyboardHidden {
|
||||
[KBHUD setContainerView:nil];
|
||||
self.bgImageView.image = nil;
|
||||
self.kb_cachedGradientImage = nil;
|
||||
[self.kb_defaultGradientLayer removeFromSuperlayer];
|
||||
self.kb_defaultGradientLayer = nil;
|
||||
[[SDImageCache sharedImageCache] clearMemory];
|
||||
|
||||
// 聊天相关可能持有音频数据/临时文件,键盘隐藏时直接清空,避免累计占用。
|
||||
if (self.chatAudioPlayer) {
|
||||
[self.chatAudioPlayer stop];
|
||||
self.chatAudioPlayer = nil;
|
||||
}
|
||||
if (_chatMessages.count > 0) {
|
||||
NSString *tmpRoot = NSTemporaryDirectory();
|
||||
for (KBChatMessage *msg in _chatMessages.copy) {
|
||||
if (tmpRoot.length > 0 && msg.audioFilePath.length > 0 &&
|
||||
[msg.audioFilePath hasPrefix:tmpRoot]) {
|
||||
[[NSFileManager defaultManager] removeItemAtPath:msg.audioFilePath
|
||||
error:nil];
|
||||
}
|
||||
}
|
||||
[_chatMessages removeAllObjects];
|
||||
}
|
||||
|
||||
if (_keyBoardMainView) {
|
||||
[_keyBoardMainView removeFromSuperview];
|
||||
_keyBoardMainView = nil;
|
||||
}
|
||||
self.keyBoardMainHeightConstraint = nil;
|
||||
|
||||
if (_functionView) {
|
||||
[_functionView removeFromSuperview];
|
||||
_functionView = nil;
|
||||
}
|
||||
if (_chatPanelView) {
|
||||
[_chatPanelView removeFromSuperview];
|
||||
_chatPanelView = nil;
|
||||
}
|
||||
self.chatPanelVisible = NO;
|
||||
|
||||
if (_subscriptionView) {
|
||||
[_subscriptionView removeFromSuperview];
|
||||
_subscriptionView = nil;
|
||||
}
|
||||
if (_settingView) {
|
||||
[_settingView removeFromSuperview];
|
||||
_settingView = nil;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)showSubscriptionPanel {
|
||||
// 1) 先判断权限:未开启“完全访问”则走引导逻辑
|
||||
if (![[KBFullAccessManager shared] hasFullAccess]) {
|
||||
@@ -663,8 +857,6 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
if (index == 1) {
|
||||
[self showFunctionPanel:NO];
|
||||
[self showChatPanel:YES];
|
||||
// 显示 persona 头像
|
||||
[self kb_showPersonaAvatarOnBgImageView];
|
||||
return;
|
||||
}
|
||||
[self showFunctionPanel:NO];
|
||||
@@ -826,14 +1018,12 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
|
||||
- (void)chatPanelViewDidTapClose:(KBChatPanelView *)view {
|
||||
// 清空 chatPanelView 内部的消息
|
||||
[self.chatPanelView kb_reloadWithMessages:@[]];
|
||||
[view kb_reloadWithMessages:@[]];
|
||||
if (self.chatAudioPlayer.isPlaying) {
|
||||
[self.chatAudioPlayer stop];
|
||||
}
|
||||
self.chatAudioPlayer = nil;
|
||||
[self showChatPanel:NO];
|
||||
// 隐藏 persona 头像
|
||||
[self kb_hidePersonaAvatar];
|
||||
}
|
||||
|
||||
#pragma mark - Chat Helpers
|
||||
@@ -1482,108 +1672,6 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
return _subscriptionView;
|
||||
}
|
||||
|
||||
- (UIImageView *)personaAvatarImageView {
|
||||
if (!_personaAvatarImageView) {
|
||||
_personaAvatarImageView = [[UIImageView alloc] init];
|
||||
_personaAvatarImageView.contentMode = UIViewContentModeScaleAspectFill;
|
||||
_personaAvatarImageView.clipsToBounds = YES;
|
||||
_personaAvatarImageView.hidden = YES;
|
||||
[_personaAvatarImageView addSubview:self.personaBlurView];
|
||||
[self.personaBlurView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.edges.equalTo(_personaAvatarImageView);
|
||||
}];
|
||||
}
|
||||
return _personaAvatarImageView;
|
||||
}
|
||||
- (UIImageView *)personaGrayImageView{
|
||||
if (!_personaGrayImageView) {
|
||||
_personaGrayImageView = [[UIImageView alloc] init];
|
||||
_personaAvatarImageView.contentMode = UIViewContentModeScaleAspectFill;
|
||||
|
||||
}
|
||||
return _personaGrayImageView;
|
||||
}
|
||||
|
||||
- (UIVisualEffectView *)personaBlurView {
|
||||
if (!_personaBlurView) {
|
||||
UIBlurEffect *effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
|
||||
_personaBlurView = [[UIVisualEffectView alloc] initWithEffect:effect];
|
||||
_personaBlurView.hidden = YES;
|
||||
_personaBlurView.userInteractionEnabled = NO;
|
||||
}
|
||||
return _personaBlurView;
|
||||
}
|
||||
|
||||
#pragma mark - Persona Avatar
|
||||
|
||||
/// 从 AppGroup 读取选中的 persona 信息
|
||||
- (NSDictionary *)kb_selectedPersonaFromAppGroup {
|
||||
NSUserDefaults *ud = [[NSUserDefaults alloc] initWithSuiteName:AppGroup];
|
||||
NSDictionary *personaDict = [ud objectForKey:@"AppGroup_SelectedPersona"];
|
||||
if ([personaDict isKindOfClass:[NSDictionary class]]) {
|
||||
return personaDict;
|
||||
}
|
||||
return nil;
|
||||
}
|
||||
|
||||
/// 在 bgImageView 上显示 persona 头像
|
||||
- (void)kb_showPersonaAvatarOnBgImageView {
|
||||
// 检查是否有完全访问权限
|
||||
if (![[KBFullAccessManager shared] hasFullAccess]) {
|
||||
NSLog(@"[Keyboard] 未开启完全访问,无法显示 persona 头像");
|
||||
return;
|
||||
}
|
||||
|
||||
// 从 AppGroup 共享目录读取预处理好的小图片
|
||||
NSURL *containerURL = [[NSFileManager defaultManager] containerURLForSecurityApplicationGroupIdentifier:AppGroup];
|
||||
if (!containerURL) {
|
||||
NSLog(@"[Keyboard] 无法获取 AppGroup 容器目录");
|
||||
return;
|
||||
}
|
||||
|
||||
NSString *imagePath = [[containerURL path] stringByAppendingPathComponent:@"persona_cover.jpg"];
|
||||
if (![[NSFileManager defaultManager] fileExistsAtPath:imagePath]) {
|
||||
NSLog(@"[Keyboard] persona 封面图文件不存在: %@", imagePath);
|
||||
return;
|
||||
}
|
||||
|
||||
NSLog(@"[Keyboard] 准备从本地加载 persona 封面图: %@", imagePath);
|
||||
|
||||
// 添加视图到 contentView,与 bgImageView 尺寸一致
|
||||
if (!self.personaAvatarImageView.superview) {
|
||||
[self.contentView insertSubview:self.personaAvatarImageView aboveSubview:self.bgImageView];
|
||||
[self.personaAvatarImageView addSubview:self.personaGrayImageView];
|
||||
[self.personaAvatarImageView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.edges.equalTo(self.bgImageView);
|
||||
}];
|
||||
[self.personaGrayImageView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
make.left.right.bottom.equalTo(self.personaAvatarImageView);
|
||||
make.height.mas_equalTo(self.keyBoardMainView);
|
||||
}];
|
||||
}
|
||||
|
||||
// 先清理旧图片
|
||||
self.personaAvatarImageView.image = nil;
|
||||
|
||||
// 从本地文件加载图片(已经是缩小后的小图片,内存占用很小)
|
||||
UIImage *image = [UIImage imageWithContentsOfFile:imagePath];
|
||||
if (image) {
|
||||
self.personaAvatarImageView.image = image;
|
||||
self.personaAvatarImageView.hidden = NO;
|
||||
self.personaBlurView.hidden = NO;
|
||||
NSLog(@"[Keyboard] persona 封面图加载成功");
|
||||
} else {
|
||||
NSLog(@"[Keyboard] persona 封面图加载失败");
|
||||
}
|
||||
}
|
||||
|
||||
/// 隐藏 persona 头像
|
||||
- (void)kb_hidePersonaAvatar {
|
||||
self.personaAvatarImageView.hidden = YES;
|
||||
self.personaAvatarImageView.image = nil;
|
||||
self.personaBlurView.hidden = YES;
|
||||
}
|
||||
|
||||
#pragma mark - Actions
|
||||
|
||||
- (void)kb_openRechargeForProduct:(KBKeyboardSubscriptionProduct *)product {
|
||||
@@ -1638,10 +1726,26 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
if (self.kb_fullAccessObserverToken) {
|
||||
[[NSNotificationCenter defaultCenter]
|
||||
removeObserver:self.kb_fullAccessObserverToken];
|
||||
self.kb_fullAccessObserverToken = nil;
|
||||
}
|
||||
if (self.kb_skinObserverToken) {
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self.kb_skinObserverToken];
|
||||
self.kb_skinObserverToken = nil;
|
||||
}
|
||||
CFNotificationCenterRemoveObserver(
|
||||
CFNotificationCenterGetDarwinNotifyCenter(),
|
||||
(__bridge const void *)(self),
|
||||
(__bridge CFStringRef)KBDarwinSkinInstallRequestNotification, NULL);
|
||||
#if DEBUG
|
||||
if (_kb_debugDidCountAlive) {
|
||||
sKBKeyboardVCAliveCount -= 1;
|
||||
}
|
||||
NSLog(@"[Keyboard] KeyboardViewController dealloc alive=%ld self=%p mem=%@",
|
||||
(long)sKBKeyboardVCAliveCount, self, KBFormatMB(KBPhysFootprintBytes()));
|
||||
#endif
|
||||
}
|
||||
|
||||
// 当键盘第一次显示时,尝试唤起主 App 以提示登录(由主 App
|
||||
@@ -1659,7 +1763,15 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
|
||||
- (void)viewDidLayoutSubviews {
|
||||
[super viewDidLayoutSubviews];
|
||||
[self kb_updateKeyboardLayoutIfNeeded];
|
||||
// [self kb_updateKeyboardLayoutIfNeeded];
|
||||
|
||||
// 首次布局完成后显示,避免闪烁
|
||||
if (self.contentView.hidden) {
|
||||
self.contentView.hidden = NO;
|
||||
}
|
||||
if (self.kb_defaultGradientLayer) {
|
||||
self.kb_defaultGradientLayer.frame = self.bgImageView.bounds;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)viewWillTransitionToSize:(CGSize)size
|
||||
@@ -1693,94 +1805,133 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
#pragma mark - Theme
|
||||
|
||||
- (void)kb_applyTheme {
|
||||
KBSkinTheme *t = [KBSkinManager shared].current;
|
||||
UIImage *img = [[KBSkinManager shared] currentBackgroundImage];
|
||||
BOOL isDefaultTheme = [self kb_isDefaultKeyboardTheme:t];
|
||||
BOOL isDarkMode = [self kb_isDarkModeActive];
|
||||
CGSize size = self.bgImageView.bounds.size;
|
||||
if (isDefaultTheme) {
|
||||
if (isDarkMode) {
|
||||
@autoreleasepool {
|
||||
KBSkinTheme *t = [KBSkinManager shared].current;
|
||||
UIImage *img = nil;
|
||||
BOOL isDefaultTheme = [self kb_isDefaultKeyboardTheme:t];
|
||||
BOOL isDarkMode = [self kb_isDarkModeActive];
|
||||
|
||||
NSString *skinId = t.skinId ?: @"";
|
||||
NSString *themeKey =
|
||||
[NSString stringWithFormat:@"%@|default=%d|dark=%d",
|
||||
skinId, isDefaultTheme, isDarkMode];
|
||||
BOOL themeChanged =
|
||||
(self.kb_lastAppliedThemeKey.length == 0 ||
|
||||
![self.kb_lastAppliedThemeKey isEqualToString:themeKey]);
|
||||
if (themeChanged) {
|
||||
self.kb_lastAppliedThemeKey = themeKey;
|
||||
}
|
||||
|
||||
CGSize size = self.bgImageView.bounds.size;
|
||||
if (isDefaultTheme) {
|
||||
if (isDarkMode) {
|
||||
// 暗黑模式:直接使用背景色,不使用图片渲染
|
||||
// 这样可以避免图片渲染时的色彩空间转换导致颜色不一致
|
||||
img = nil;
|
||||
self.bgImageView.image = nil;
|
||||
[self.kb_defaultGradientLayer removeFromSuperlayer];
|
||||
self.kb_defaultGradientLayer = nil;
|
||||
// 使用与系统键盘底部完全相同的颜色
|
||||
if (@available(iOS 13.0, *)) {
|
||||
// iOS 系统键盘使用的实际颜色 (RGB: 44, 44, 46 in sRGB, 或 #2C2C2E)
|
||||
// 但为了完美匹配,我们使用动态颜色并直接设置为背景
|
||||
UIColor *kbBgColor =
|
||||
[UIColor colorWithDynamicProvider:^UIColor *_Nonnull(
|
||||
UITraitCollection *_Nonnull traitCollection) {
|
||||
if (traitCollection.userInterfaceStyle ==
|
||||
UIUserInterfaceStyleDark) {
|
||||
// 暗黑模式下系统键盘实际背景色
|
||||
return [UIColor colorWithRed:43.0 / 255.0
|
||||
green:43.0 / 255.0
|
||||
blue:43.0 / 255.0
|
||||
alpha:1.0];
|
||||
} else {
|
||||
return [UIColor colorWithRed:209.0 / 255.0
|
||||
green:211.0 / 255.0
|
||||
blue:219.0 / 255.0
|
||||
alpha:1.0];
|
||||
}
|
||||
}];
|
||||
self.contentView.backgroundColor = kbBgColor;
|
||||
self.bgImageView.backgroundColor = kbBgColor;
|
||||
} else {
|
||||
UIColor *darkColor = [UIColor colorWithRed:43.0 / 255.0
|
||||
green:43.0 / 255.0
|
||||
blue:43.0 / 255.0
|
||||
alpha:1.0];
|
||||
self.contentView.backgroundColor = darkColor;
|
||||
self.bgImageView.backgroundColor = darkColor;
|
||||
}
|
||||
// iOS 系统键盘使用的实际颜色 (RGB: 44, 44, 46 in sRGB, 或 #2C2C2E)
|
||||
// 但为了完美匹配,我们使用动态颜色并直接设置为背景
|
||||
UIColor *kbBgColor =
|
||||
[UIColor colorWithDynamicProvider:^UIColor *_Nonnull(
|
||||
UITraitCollection *_Nonnull traitCollection) {
|
||||
if (traitCollection.userInterfaceStyle ==
|
||||
UIUserInterfaceStyleDark) {
|
||||
// 暗黑模式下系统键盘实际背景色
|
||||
return [UIColor colorWithRed:43.0 / 255.0
|
||||
green:43.0 / 255.0
|
||||
blue:43.0 / 255.0
|
||||
alpha:1.0];
|
||||
} else {
|
||||
return [UIColor colorWithRed:209.0 / 255.0
|
||||
green:211.0 / 255.0
|
||||
blue:219.0 / 255.0
|
||||
alpha:1.0];
|
||||
}
|
||||
}];
|
||||
self.contentView.backgroundColor = kbBgColor;
|
||||
self.bgImageView.backgroundColor = kbBgColor;
|
||||
} else {
|
||||
UIColor *darkColor = [UIColor colorWithRed:43.0 / 255.0
|
||||
green:43.0 / 255.0
|
||||
blue:43.0 / 255.0
|
||||
alpha:1.0];
|
||||
self.contentView.backgroundColor = darkColor;
|
||||
self.bgImageView.backgroundColor = darkColor;
|
||||
}
|
||||
} else {
|
||||
// 浅色模式:使用渐变图片
|
||||
if (size.width <= 0 || size.height <= 0) {
|
||||
[self.view layoutIfNeeded];
|
||||
size = self.bgImageView.bounds.size;
|
||||
// 浅色模式:使用渐变层(避免生成大位图导致内存上涨)
|
||||
if (size.width <= 0 || size.height <= 0) {
|
||||
[self.view layoutIfNeeded];
|
||||
size = self.bgImageView.bounds.size;
|
||||
}
|
||||
if (size.width <= 0 || size.height <= 0) {
|
||||
size = self.view.bounds.size;
|
||||
}
|
||||
if (size.width <= 0 || size.height <= 0) {
|
||||
size = [UIScreen mainScreen].bounds.size;
|
||||
}
|
||||
UIColor *topColor = [UIColor colorWithHex:0xDEDFE4];
|
||||
UIColor *bottomColor = [UIColor colorWithHex:0xD1D3DB];
|
||||
UIColor *resolvedTopColor = topColor;
|
||||
UIColor *resolvedBottomColor = bottomColor;
|
||||
if (@available(iOS 13.0, *)) {
|
||||
resolvedTopColor =
|
||||
[topColor resolvedColorWithTraitCollection:self.traitCollection];
|
||||
resolvedBottomColor = [bottomColor
|
||||
resolvedColorWithTraitCollection:self.traitCollection];
|
||||
}
|
||||
CAGradientLayer *layer = self.kb_defaultGradientLayer;
|
||||
if (!layer) {
|
||||
layer = [CAGradientLayer layer];
|
||||
layer.startPoint = CGPointMake(0.5, 0.0);
|
||||
layer.endPoint = CGPointMake(0.5, 1.0);
|
||||
[self.bgImageView.layer insertSublayer:layer atIndex:0];
|
||||
self.kb_defaultGradientLayer = layer;
|
||||
}
|
||||
layer.colors = @[
|
||||
(id)resolvedTopColor.CGColor,
|
||||
(id)resolvedBottomColor.CGColor
|
||||
];
|
||||
layer.frame = (CGRect){CGPointZero, size};
|
||||
img = nil;
|
||||
self.bgImageView.image = nil;
|
||||
self.contentView.backgroundColor = [UIColor clearColor];
|
||||
self.bgImageView.backgroundColor = [UIColor clearColor];
|
||||
}
|
||||
if (size.width <= 0 || size.height <= 0) {
|
||||
size = self.view.bounds.size;
|
||||
}
|
||||
if (size.width <= 0 || size.height <= 0) {
|
||||
size = [UIScreen mainScreen].bounds.size;
|
||||
}
|
||||
UIColor *topColor = [UIColor colorWithHex:0xDEDFE4];
|
||||
UIColor *bottomColor = [UIColor colorWithHex:0xD1D3DB];
|
||||
img = [self kb_defaultGradientImageWithSize:size
|
||||
topColor:topColor
|
||||
bottomColor:bottomColor];
|
||||
NSLog(@"===");
|
||||
} else {
|
||||
// 自定义皮肤:清除背景色,使用皮肤图片
|
||||
self.contentView.backgroundColor = [UIColor clearColor];
|
||||
self.bgImageView.backgroundColor = [UIColor clearColor];
|
||||
[self.kb_defaultGradientLayer removeFromSuperlayer];
|
||||
self.kb_defaultGradientLayer = nil;
|
||||
img = [[KBSkinManager shared] currentBackgroundImage];
|
||||
}
|
||||
NSLog(@"===");
|
||||
} else {
|
||||
// 自定义皮肤:清除背景色,使用皮肤图片
|
||||
self.contentView.backgroundColor = [UIColor clearColor];
|
||||
self.bgImageView.backgroundColor = [UIColor clearColor];
|
||||
}
|
||||
NSLog(@"⌨️[Keyboard] apply theme id=%@ hasBg=%d", t.skinId, (img != nil));
|
||||
[self kb_logSkinDiagnosticsWithTheme:t backgroundImage:img];
|
||||
self.bgImageView.image = img;
|
||||
self.personaGrayImageView.image = img;
|
||||
NSLog(@"⌨️[Keyboard] apply theme id=%@ hasBg=%d", t.skinId, (img != nil));
|
||||
[self kb_logSkinDiagnosticsWithTheme:t backgroundImage:img];
|
||||
self.bgImageView.image = img;
|
||||
|
||||
// [self.chatPanelView kb_setBackgroundImage:img];
|
||||
BOOL hasImg = (img != nil);
|
||||
// 触发键区按主题重绘
|
||||
if ([self.keyBoardMainView respondsToSelector:@selector(kb_applyTheme)]) {
|
||||
// 触发键区按主题重绘
|
||||
if (themeChanged &&
|
||||
[self.keyBoardMainView respondsToSelector:@selector(kb_applyTheme)]) {
|
||||
// method declared in KBKeyBoardMainView.h
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
|
||||
[self.keyBoardMainView performSelector:@selector(kb_applyTheme)];
|
||||
[self.keyBoardMainView performSelector:@selector(kb_applyTheme)];
|
||||
#pragma clang diagnostic pop
|
||||
}
|
||||
if ([self.functionView respondsToSelector:@selector(kb_applyTheme)]) {
|
||||
}
|
||||
// 注意:这里不能直接访问 self.functionView,否则会导致功能面板提前创建,占用内存。
|
||||
if (themeChanged && _functionView &&
|
||||
[_functionView respondsToSelector:@selector(kb_applyTheme)]) {
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
|
||||
[self.functionView performSelector:@selector(kb_applyTheme)];
|
||||
[_functionView performSelector:@selector(kb_applyTheme)];
|
||||
#pragma clang diagnostic pop
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1819,8 +1970,12 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
return nil;
|
||||
}
|
||||
|
||||
// 将动态颜色解析为当前 trait collection 下的具体颜色值
|
||||
// 否则在 UIGraphicsBeginImageContextWithOptions 中渲染时会使用默认的浅色模式
|
||||
// 尺寸未变则复用缓存,避免反复创建图片撑爆键盘扩展内存
|
||||
if (self.kb_cachedGradientImage &&
|
||||
CGSizeEqualToSize(self.kb_cachedGradientSize, size)) {
|
||||
return self.kb_cachedGradientImage;
|
||||
}
|
||||
|
||||
UIColor *resolvedTopColor = topColor;
|
||||
UIColor *resolvedBottomColor = bottomColor;
|
||||
if (@available(iOS 13.0, *)) {
|
||||
@@ -1841,6 +1996,9 @@ static void KBSkinInstallNotificationCallback(CFNotificationCenterRef center,
|
||||
[layer renderInContext:UIGraphicsGetCurrentContext()];
|
||||
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
|
||||
UIGraphicsEndImageContext();
|
||||
|
||||
self.kb_cachedGradientImage = image;
|
||||
self.kb_cachedGradientSize = size;
|
||||
return image;
|
||||
}
|
||||
|
||||
|
||||
@@ -49,7 +49,6 @@ static const CGFloat kKBLettersRow2EdgeSpacerMultiplier = 0.5;
|
||||
self.layoutConfig = [KBKeyboardLayoutConfig sharedConfig];
|
||||
self.backspaceHandler = [[KBBackspaceLongPressHandler alloc] initWithContainerView:self];
|
||||
[self buildBase];
|
||||
[self reloadKeys];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
@@ -594,7 +593,7 @@ edgeSpacerMultiplier:(CGFloat)edgeSpacerMultiplier {
|
||||
[btn setTitle:key.title forState:UIControlStateNormal];
|
||||
// 在设置完标题后,按当前皮肤应用图标与文字显隐
|
||||
[btn applyThemeForCurrentKey];
|
||||
[btn addTarget:self action:@selector(onKeyTapped:) forControlEvents:UIControlEventTouchUpInside];
|
||||
[btn addTarget:self action:@selector(onKeyTapped:) forControlEvents:UIControlEventTouchDown];
|
||||
[row addSubview:btn];
|
||||
|
||||
if (key.type == KBKeyTypeBackspace) {
|
||||
@@ -920,7 +919,7 @@ edgeSpacerMultiplier:(CGFloat)edgeSpacerMultiplier {
|
||||
}
|
||||
|
||||
[btn applyThemeForCurrentKey];
|
||||
[btn addTarget:self action:@selector(onKeyTapped:) forControlEvents:UIControlEventTouchUpInside];
|
||||
[btn addTarget:self action:@selector(onKeyTapped:) forControlEvents:UIControlEventTouchDown];
|
||||
|
||||
if (key.type == KBKeyTypeBackspace) {
|
||||
[self.backspaceHandler bindDeleteButton:btn showClearLabel:YES];
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
#import "KBResponderUtils.h" // 查找 UIInputViewController,用于系统切换输入法
|
||||
#import "KBBackspaceUndoManager.h"
|
||||
#import "KBSkinManager.h"
|
||||
#import <ImageIO/ImageIO.h>
|
||||
|
||||
@interface KBToolBar ()
|
||||
@property (nonatomic, strong) UIView *leftContainer;
|
||||
@@ -20,6 +21,8 @@
|
||||
@property (nonatomic, assign) BOOL kbNeedsInputModeSwitchKey;
|
||||
@property (nonatomic, assign) BOOL kbUndoVisible;
|
||||
@property (nonatomic, assign) BOOL kbAvatarVisible;
|
||||
@property (nonatomic, copy, nullable) NSString *kb_cachedPersonaCoverPath;
|
||||
@property (nonatomic, strong, nullable) UIImage *kb_cachedPersonaCoverImage;
|
||||
@end
|
||||
|
||||
@implementation KBToolBar
|
||||
@@ -256,10 +259,41 @@
|
||||
[[containerURL path] stringByAppendingPathComponent:@"persona_cover.jpg"];
|
||||
if (imagePath.length == 0 ||
|
||||
![[NSFileManager defaultManager] fileExistsAtPath:imagePath]) {
|
||||
self.kb_cachedPersonaCoverPath = nil;
|
||||
self.kb_cachedPersonaCoverImage = nil;
|
||||
return nil;
|
||||
}
|
||||
|
||||
return [UIImage imageWithContentsOfFile:imagePath];
|
||||
if (self.kb_cachedPersonaCoverImage &&
|
||||
[self.kb_cachedPersonaCoverPath isEqualToString:imagePath]) {
|
||||
return self.kb_cachedPersonaCoverImage;
|
||||
}
|
||||
|
||||
// 头像仅 40pt,直接按像素上限缩略解码,避免每次显示键盘都 full decode 一张大 JPG 顶爆扩展内存。
|
||||
NSUInteger maxPixel = 256;
|
||||
NSURL *url = [NSURL fileURLWithPath:imagePath];
|
||||
CGImageSourceRef source = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
|
||||
if (!source) {
|
||||
return nil;
|
||||
}
|
||||
NSDictionary *opts = @{
|
||||
(__bridge id)kCGImageSourceCreateThumbnailFromImageAlways : @YES,
|
||||
(__bridge id)kCGImageSourceCreateThumbnailWithTransform : @YES,
|
||||
(__bridge id)kCGImageSourceThumbnailMaxPixelSize : @(maxPixel),
|
||||
};
|
||||
CGImageRef cg = CGImageSourceCreateThumbnailAtIndex(source, 0, (__bridge CFDictionaryRef)opts);
|
||||
CFRelease(source);
|
||||
if (!cg) {
|
||||
return nil;
|
||||
}
|
||||
UIImage *img = [UIImage imageWithCGImage:cg
|
||||
scale:[UIScreen mainScreen].scale
|
||||
orientation:UIImageOrientationUp];
|
||||
CGImageRelease(cg);
|
||||
|
||||
self.kb_cachedPersonaCoverPath = imagePath;
|
||||
self.kb_cachedPersonaCoverImage = img;
|
||||
return img;
|
||||
}
|
||||
|
||||
#pragma mark - Actions
|
||||
@@ -419,7 +453,7 @@
|
||||
- (void)kb_updateRightControlsConstraints {
|
||||
[self.avatarImageView mas_remakeConstraints:^(MASConstraintMaker *make) {
|
||||
make.right.equalTo(self).offset(-12);
|
||||
make.centerY.equalTo(self).offset(5);
|
||||
make.centerY.equalTo(self).offset(0);
|
||||
make.width.height.mas_equalTo(kKBAvatarSize);
|
||||
}];
|
||||
[self.undoButtonInternal mas_remakeConstraints:^(MASConstraintMaker *make) {
|
||||
|
||||
@@ -54,6 +54,9 @@ extern NSString * const KBDarwinSkinChanged; // cross-process
|
||||
/// 当前背景图片(若存在)
|
||||
- (nullable UIImage *)currentBackgroundImage;
|
||||
|
||||
/// 清理运行时图片缓存(内存缓存)。键盘扩展接近内存上限时可主动调用。
|
||||
- (void)clearRuntimeImageCaches;
|
||||
|
||||
/// 当前主题下,指定按键标识的文字是否应被隐藏(例如图标里已包含字母)
|
||||
- (BOOL)shouldHideKeyTextForIdentifier:(nullable NSString *)identifier;
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
|
||||
#import "KBSkinManager.h"
|
||||
#import "KBConfig.h"
|
||||
#import <ImageIO/ImageIO.h>
|
||||
|
||||
NSString * const KBSkinDidChangeNotification = @"KBSkinDidChangeNotification";
|
||||
NSString * const KBDarwinSkinChanged = @"com.loveKey.nyx.skin.changed";
|
||||
@@ -59,10 +60,45 @@ static NSString * const kKBSkinThemeStoreKey = @"KBSkinThemeCurrent";
|
||||
|
||||
@interface KBSkinManager ()
|
||||
@property (atomic, strong, readwrite) KBSkinTheme *current;
|
||||
@property (nonatomic, strong) NSCache<NSString *, UIImage *> *kb_fileImageCache;
|
||||
@property (nonatomic, copy, nullable) NSString *kb_cachedBgSkinId;
|
||||
@property (nonatomic, assign) BOOL kb_cachedBgResolved;
|
||||
@property (nonatomic, strong, nullable) UIImage *kb_cachedBgImage;
|
||||
@end
|
||||
|
||||
@implementation KBSkinManager
|
||||
|
||||
/// 从文件路径解码图片,并按 maxPixel 限制最长边像素(避免加载超大背景图导致键盘扩展内存飙升)。
|
||||
+ (nullable UIImage *)kb_imageAtPath:(NSString *)path maxPixel:(NSUInteger)maxPixel {
|
||||
if (path.length == 0) return nil;
|
||||
NSURL *url = [NSURL fileURLWithPath:path];
|
||||
CGImageSourceRef source = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
|
||||
if (!source) return nil;
|
||||
NSDictionary *opts = @{
|
||||
(__bridge id)kCGImageSourceCreateThumbnailFromImageAlways : @YES,
|
||||
(__bridge id)kCGImageSourceCreateThumbnailWithTransform : @YES,
|
||||
(__bridge id)kCGImageSourceThumbnailMaxPixelSize : @(MAX(1, (NSInteger)maxPixel)),
|
||||
};
|
||||
CGImageRef cg = CGImageSourceCreateThumbnailAtIndex(source, 0, (__bridge CFDictionaryRef)opts);
|
||||
CFRelease(source);
|
||||
if (!cg) return nil;
|
||||
UIImage *img = [UIImage imageWithCGImage:cg scale:[UIScreen mainScreen].scale orientation:UIImageOrientationUp];
|
||||
CGImageRelease(cg);
|
||||
return img;
|
||||
}
|
||||
|
||||
static inline NSUInteger KBApproxImageCostBytes(UIImage *img) {
|
||||
if (!img) return 0;
|
||||
CGFloat scale = img.scale > 0 ? img.scale : [UIScreen mainScreen].scale;
|
||||
CGSize s = img.size;
|
||||
double px = (double)s.width * scale * (double)s.height * scale;
|
||||
if (px <= 0) return 0;
|
||||
// RGBA 4 bytes/pixel
|
||||
double cost = px * 4.0;
|
||||
if (cost > (double)NSUIntegerMax) return NSUIntegerMax;
|
||||
return (NSUInteger)cost;
|
||||
}
|
||||
|
||||
/// 返回所有可能的皮肤根目录(优先 App Group,其次当前进程的 Caches)。
|
||||
+ (NSArray<NSString *> *)kb_candidateBaseRoots {
|
||||
NSMutableArray<NSString *> *roots = [NSMutableArray array];
|
||||
@@ -104,6 +140,14 @@ static NSString * const kKBSkinThemeStoreKey = @"KBSkinThemeCurrent";
|
||||
|
||||
- (instancetype)init {
|
||||
if (self = [super init]) {
|
||||
_kb_fileImageCache = [NSCache new];
|
||||
// 键盘扩展内存上限较小,缓存要保守一些;主 App 也共用该实现但不会出问题。
|
||||
// iPad 的键盘背景可能更大,适当放宽。
|
||||
if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {
|
||||
_kb_fileImageCache.totalCostLimit = 24 * 1024 * 1024;
|
||||
} else {
|
||||
_kb_fileImageCache.totalCostLimit = 12 * 1024 * 1024;
|
||||
}
|
||||
KBSkinTheme *t = [self p_loadFromStore];
|
||||
// 若存储中的皮肤在 App Group 中找不到对应资源目录(如首次安装 / 已被清理),则回退到默认皮肤。
|
||||
if (!t || ![self.class kb_hasAssetsForSkinId:t.skinId]) {
|
||||
@@ -170,6 +214,7 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
- (BOOL)applyTheme:(KBSkinTheme *)theme {
|
||||
if (!theme) return NO;
|
||||
NSLog(@"🎨[SkinManager] apply theme id=%@ name=%@", theme.skinId, theme.name);
|
||||
[self clearRuntimeImageCaches];
|
||||
// 将主题写入 App Group 存储(失败也不影响本次进程内的使用)
|
||||
[self p_saveToStore:theme];
|
||||
// 始终更新当前主题并广播通知,确保当前进程和扩展之间保持同步。
|
||||
@@ -187,6 +232,15 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
[self applyTheme:[self.class defaultTheme]];
|
||||
}
|
||||
|
||||
- (void)clearRuntimeImageCaches {
|
||||
@synchronized (self) {
|
||||
[self.kb_fileImageCache removeAllObjects];
|
||||
self.kb_cachedBgSkinId = nil;
|
||||
self.kb_cachedBgResolved = NO;
|
||||
self.kb_cachedBgImage = nil;
|
||||
}
|
||||
}
|
||||
|
||||
- (BOOL)applyImageSkinWithData:(NSData *)imageData skinId:(NSString *)skinId name:(NSString *)name {
|
||||
// 仅作为“存在背景图”的标记使用:图像文件本身存放在 App Group 容器
|
||||
// Skins/<skinId>/background.png 中,这里不再把二进制图片写入 Keychain,
|
||||
@@ -216,20 +270,52 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
NSString *skinId = self.current.skinId;
|
||||
if (skinId.length == 0) return nil;
|
||||
|
||||
// 同一个 skinId 在键盘的生命周期内会被频繁读取;缓存一份避免反复解码导致内存上涨。
|
||||
@synchronized (self) {
|
||||
if (self.kb_cachedBgResolved && [self.kb_cachedBgSkinId isEqualToString:skinId]) {
|
||||
return self.kb_cachedBgImage;
|
||||
}
|
||||
}
|
||||
|
||||
NSArray<NSString *> *roots = [self.class kb_candidateBaseRoots];
|
||||
NSFileManager *fm = [NSFileManager defaultManager];
|
||||
NSString *relative = [NSString stringWithFormat:@"Skins/%@/background.png", skinId];
|
||||
|
||||
// 背景图通常远大于键盘实际显示区域,按像素上限做缩略解码,显著降低扩展内存占用。
|
||||
NSUInteger maxPixel = (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) ? 2048 : 1024;
|
||||
for (NSString *base in roots) {
|
||||
NSString *bgPath = [[base stringByAppendingPathComponent:relative] stringByStandardizingPath];
|
||||
BOOL isDir = NO;
|
||||
if (![fm fileExistsAtPath:bgPath isDirectory:&isDir] || isDir) {
|
||||
continue;
|
||||
}
|
||||
NSData *data = [NSData dataWithContentsOfFile:bgPath];
|
||||
if (data.length == 0) continue;
|
||||
UIImage *img = [UIImage imageWithData:data scale:[UIScreen mainScreen].scale];
|
||||
if (img) return img;
|
||||
NSString *cacheKey = [NSString stringWithFormat:@"bg|%@", bgPath];
|
||||
UIImage *cached = [self.kb_fileImageCache objectForKey:cacheKey];
|
||||
if (cached) {
|
||||
@synchronized (self) {
|
||||
self.kb_cachedBgSkinId = skinId;
|
||||
self.kb_cachedBgResolved = YES;
|
||||
self.kb_cachedBgImage = cached;
|
||||
}
|
||||
return cached;
|
||||
}
|
||||
|
||||
UIImage *img = [self.class kb_imageAtPath:bgPath maxPixel:maxPixel];
|
||||
if (img) {
|
||||
NSUInteger cost = KBApproxImageCostBytes(img);
|
||||
[self.kb_fileImageCache setObject:img forKey:cacheKey cost:cost];
|
||||
@synchronized (self) {
|
||||
self.kb_cachedBgSkinId = skinId;
|
||||
self.kb_cachedBgResolved = YES;
|
||||
self.kb_cachedBgImage = img;
|
||||
}
|
||||
return img;
|
||||
}
|
||||
}
|
||||
@synchronized (self) {
|
||||
self.kb_cachedBgSkinId = skinId;
|
||||
self.kb_cachedBgResolved = YES;
|
||||
self.kb_cachedBgImage = nil;
|
||||
}
|
||||
return nil;
|
||||
}
|
||||
@@ -314,7 +400,13 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
if (![fm fileExistsAtPath:fullPath isDirectory:&isDir] || isDir) {
|
||||
continue;
|
||||
}
|
||||
UIImage *img = [UIImage imageWithContentsOfFile:fullPath];
|
||||
NSString *cacheKey = [NSString stringWithFormat:@"icon|%@", fullPath];
|
||||
UIImage *img = [self.kb_fileImageCache objectForKey:cacheKey];
|
||||
if (img) return img;
|
||||
img = [UIImage imageWithContentsOfFile:fullPath];
|
||||
if (img) {
|
||||
[self.kb_fileImageCache setObject:img forKey:cacheKey cost:KBApproxImageCostBytes(img)];
|
||||
}
|
||||
if (img) return img;
|
||||
}
|
||||
#if DEBUG
|
||||
@@ -351,7 +443,13 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
NSString *fullPath = [[base stringByAppendingPathComponent:relative] stringByStandardizingPath];
|
||||
BOOL isDir = NO;
|
||||
if ([fm fileExistsAtPath:fullPath isDirectory:&isDir] && !isDir) {
|
||||
UIImage *img = [UIImage imageWithContentsOfFile:fullPath];
|
||||
NSString *cacheKey = [NSString stringWithFormat:@"icon|%@", fullPath];
|
||||
UIImage *img = [self.kb_fileImageCache objectForKey:cacheKey];
|
||||
if (img) return img;
|
||||
img = [UIImage imageWithContentsOfFile:fullPath];
|
||||
if (img) {
|
||||
[self.kb_fileImageCache setObject:img forKey:cacheKey cost:KBApproxImageCostBytes(img)];
|
||||
}
|
||||
if (img) return img;
|
||||
}
|
||||
}
|
||||
@@ -363,7 +461,13 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
NSString *fullPath = [[base stringByAppendingPathComponent:relative] stringByStandardizingPath];
|
||||
BOOL isDir = NO;
|
||||
if ([fm fileExistsAtPath:fullPath isDirectory:&isDir] && !isDir) {
|
||||
UIImage *img = [UIImage imageWithContentsOfFile:fullPath];
|
||||
NSString *cacheKey = [NSString stringWithFormat:@"icon|%@", fullPath];
|
||||
UIImage *img = [self.kb_fileImageCache objectForKey:cacheKey];
|
||||
if (img) return img;
|
||||
img = [UIImage imageWithContentsOfFile:fullPath];
|
||||
if (img) {
|
||||
[self.kb_fileImageCache setObject:img forKey:cacheKey cost:KBApproxImageCostBytes(img)];
|
||||
}
|
||||
if (img) return img;
|
||||
}
|
||||
}
|
||||
@@ -449,6 +553,7 @@ static void KBSkinDarwinCallback(CFNotificationCenterRef center, void *observer,
|
||||
if (!t || ![self.class kb_hasAssetsForSkinId:t.skinId]) {
|
||||
t = [self.class defaultTheme];
|
||||
}
|
||||
[self clearRuntimeImageCaches];
|
||||
self.current = t;
|
||||
if (broadcast) {
|
||||
[[NSNotificationCenter defaultCenter] postNotificationName:KBSkinDidChangeNotification object:nil];
|
||||
|
||||
@@ -57,16 +57,8 @@
|
||||
046086752F191CC700757C95 /* AI技术分析.txt in Resources */ = {isa = PBXBuildFile; fileRef = 046086742F191CC700757C95 /* AI技术分析.txt */; };
|
||||
0460869A2F19238500757C95 /* KBAiWaveformView.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086992F19238500757C95 /* KBAiWaveformView.m */; };
|
||||
0460869C2F19238500757C95 /* KBAiRecordButton.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086972F19238500757C95 /* KBAiRecordButton.m */; };
|
||||
046086B12F19239B00757C95 /* SubtitleSync.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086AC2F19239B00757C95 /* SubtitleSync.m */; };
|
||||
046086B22F19239B00757C95 /* TTSServiceClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086B02F19239B00757C95 /* TTSServiceClient.m */; };
|
||||
046086B32F19239B00757C95 /* AudioSessionManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A22F19239B00757C95 /* AudioSessionManager.m */; };
|
||||
046086B42F19239B00757C95 /* LLMStreamClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A82F19239B00757C95 /* LLMStreamClient.m */; };
|
||||
046086B52F19239B00757C95 /* Segmenter.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086AA2F19239B00757C95 /* Segmenter.m */; };
|
||||
046086B62F19239B00757C95 /* TTSPlaybackPipeline.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086AE2F19239B00757C95 /* TTSPlaybackPipeline.m */; };
|
||||
046086B72F19239B00757C95 /* ConversationOrchestrator.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A62F19239B00757C95 /* ConversationOrchestrator.m */; };
|
||||
046086B82F19239B00757C95 /* ASRStreamClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 0460869E2F19239B00757C95 /* ASRStreamClient.m */; };
|
||||
046086B92F19239B00757C95 /* AudioCaptureManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A02F19239B00757C95 /* AudioCaptureManager.m */; };
|
||||
046086BA2F19239B00757C95 /* AudioStreamPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086A42F19239B00757C95 /* AudioStreamPlayer.m */; };
|
||||
046086BD2F1A039F00757C95 /* KBAICommentView.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086BC2F1A039F00757C95 /* KBAICommentView.m */; };
|
||||
046086CB2F1A092500757C95 /* comments_mock.json in Resources */ = {isa = PBXBuildFile; fileRef = 046086C62F1A092500757C95 /* comments_mock.json */; };
|
||||
046086CC2F1A092500757C95 /* KBAIReplyModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 046086CA2F1A092500757C95 /* KBAIReplyModel.m */; };
|
||||
@@ -78,7 +70,6 @@
|
||||
046131142ECF454500A6FADF /* KBKeyPreviewView.m in Sources */ = {isa = PBXBuildFile; fileRef = 046131132ECF454500A6FADF /* KBKeyPreviewView.m */; };
|
||||
0477BDF02EBB76E30055D639 /* HomeSheetVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 0477BDEF2EBB76E30055D639 /* HomeSheetVC.m */; };
|
||||
0477BDF32EBB7B850055D639 /* KBDirectionIndicatorView.m in Sources */ = {isa = PBXBuildFile; fileRef = 0477BDF22EBB7B850055D639 /* KBDirectionIndicatorView.m */; };
|
||||
0477BDF72EBC63A80055D639 /* KBTestVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 0477BDF62EBC63A80055D639 /* KBTestVC.m */; };
|
||||
0477BDFA2EBC66340055D639 /* HomeHeadView.m in Sources */ = {isa = PBXBuildFile; fileRef = 0477BDF92EBC66340055D639 /* HomeHeadView.m */; };
|
||||
0477BDFD2EBC6A170055D639 /* HomeHotVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 0477BDFC2EBC6A170055D639 /* HomeHotVC.m */; };
|
||||
0477BE002EBC6A330055D639 /* HomeRankVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 0477BDFF2EBC6A330055D639 /* HomeRankVC.m */; };
|
||||
@@ -140,17 +131,15 @@
|
||||
048FFD112F27432D005D62AE /* KBPersonaPageModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD0F2F27432D005D62AE /* KBPersonaPageModel.m */; };
|
||||
048FFD142F274342005D62AE /* KBPersonaChatCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD132F274342005D62AE /* KBPersonaChatCell.m */; };
|
||||
048FFD182F2763A5005D62AE /* KBVoiceInputBar.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD172F2763A5005D62AE /* KBVoiceInputBar.m */; };
|
||||
048FFD1D2F277486005D62AE /* KBChatHistoryPageModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD1C2F277486005D62AE /* KBChatHistoryPageModel.m */; };
|
||||
048FFD1E2F277486005D62AE /* KBChatHistoryModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD1A2F277486005D62AE /* KBChatHistoryModel.m */; };
|
||||
048FFD242F28A836005D62AE /* KBChatLimitPopView.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD232F28A836005D62AE /* KBChatLimitPopView.m */; };
|
||||
A1B2C9302FCA000100000001 /* KBChatLimitPopView.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD232F28A836005D62AE /* KBChatLimitPopView.m */; };
|
||||
048FFD272F28C6CF005D62AE /* KBImagePositionButton.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD262F28C6CF005D62AE /* KBImagePositionButton.m */; };
|
||||
048FFD2A2F28E99A005D62AE /* KBCommentModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD292F28E99A005D62AE /* KBCommentModel.m */; };
|
||||
048FFD2D2F29F356005D62AE /* KBAIMessageVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD2C2F29F356005D62AE /* KBAIMessageVC.m */; };
|
||||
048FFD302F29F3C3005D62AE /* KBAIMessageZanVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD2F2F29F3C3005D62AE /* KBAIMessageZanVC.m */; };
|
||||
048FFD1D2F277486005D62AE /* KBChatHistoryPageModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD1C2F277486005D62AE /* KBChatHistoryPageModel.m */; };
|
||||
048FFD1E2F277486005D62AE /* KBChatHistoryModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD1A2F277486005D62AE /* KBChatHistoryModel.m */; };
|
||||
048FFD242F28A836005D62AE /* KBChatLimitPopView.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD232F28A836005D62AE /* KBChatLimitPopView.m */; };
|
||||
048FFD272F28C6CF005D62AE /* KBImagePositionButton.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD262F28C6CF005D62AE /* KBImagePositionButton.m */; };
|
||||
048FFD2A2F28E99A005D62AE /* KBCommentModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD292F28E99A005D62AE /* KBCommentModel.m */; };
|
||||
048FFD2D2F29F356005D62AE /* KBAIMessageVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD2C2F29F356005D62AE /* KBAIMessageVC.m */; };
|
||||
048FFD302F29F3C3005D62AE /* KBAIMessageZanVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD2F2F29F3C3005D62AE /* KBAIMessageZanVC.m */; };
|
||||
048FFD332F29F3D2005D62AE /* KBAIMessageChatingVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD322F29F3D2005D62AE /* KBAIMessageChatingVC.m */; };
|
||||
048FFD342F29F400005D62AE /* KBAIMessageListVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD362F29F400005D62AE /* KBAIMessageListVC.m */; };
|
||||
048FFD362F29F88E005D62AE /* AIMessageVM.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD352F29F88E005D62AE /* AIMessageVM.m */; };
|
||||
048FFD372F29F410005D62AE /* KBAIMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD392F29F410005D62AE /* KBAIMessageCell.m */; };
|
||||
048FFD392F2A24C5005D62AE /* KBAIChatMessageCacheManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD382F2A24C5005D62AE /* KBAIChatMessageCacheManager.m */; };
|
||||
048FFD3C2F29F500005D62AE /* KBLikedCompanionModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD3B2F29F500005D62AE /* KBLikedCompanionModel.m */; };
|
||||
@@ -209,6 +198,9 @@
|
||||
04A9FE202EB893F10020DB6D /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 04A9FE1E2EB893F10020DB6D /* Localizable.strings */; };
|
||||
04A9FE212EB893F10020DB6D /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 04A9FE1E2EB893F10020DB6D /* Localizable.strings */; };
|
||||
04B5A1A22EEFA12300AAAAAA /* KBPayProductModel.m in Sources */ = {isa = PBXBuildFile; fileRef = 04B5A1A12EEFA12300AAAAAA /* KBPayProductModel.m */; };
|
||||
04BBF89D2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 04BBF89A2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m */; };
|
||||
04BBF89E2F3ACD8800B1FBB2 /* KBTestVC.m in Sources */ = {isa = PBXBuildFile; fileRef = 04BBF89C2F3ACD8800B1FBB2 /* KBTestVC.m */; };
|
||||
04BBF9002F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 04BBF8FF2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m */; };
|
||||
04C6EABA2EAF86530089C901 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 04C6EAAE2EAF86530089C901 /* Assets.xcassets */; };
|
||||
04C6EABC2EAF86530089C901 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 04C6EAB12EAF86530089C901 /* LaunchScreen.storyboard */; };
|
||||
04C6EABD2EAF86530089C901 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 04C6EAB42EAF86530089C901 /* Main.storyboard */; };
|
||||
@@ -222,10 +214,7 @@
|
||||
04D1F6B32EDFF10A00B12345 /* KBSkinInstallBridge.m in Sources */ = {isa = PBXBuildFile; fileRef = 04D1F6B12EDFF10A00B12345 /* KBSkinInstallBridge.m */; };
|
||||
04E0383E2F1A7C30002CA5A0 /* KBCustomTabBar.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E0383D2F1A7C30002CA5A0 /* KBCustomTabBar.m */; };
|
||||
04E038D82F20BFFB002CA5A0 /* websocket-api.md in Resources */ = {isa = PBXBuildFile; fileRef = 04E038D72F20BFFB002CA5A0 /* websocket-api.md */; };
|
||||
04E038DD2F20C420002CA5A0 /* VoiceChatStreamingManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038DA2F20C420002CA5A0 /* VoiceChatStreamingManager.m */; };
|
||||
04E038DE2F20C420002CA5A0 /* VoiceChatWebSocketClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038DC2F20C420002CA5A0 /* VoiceChatWebSocketClient.m */; };
|
||||
04E038E32F20E500002CA5A0 /* deepgramAPI.md in Resources */ = {isa = PBXBuildFile; fileRef = 04E038E22F20E500002CA5A0 /* deepgramAPI.md */; };
|
||||
04E038E82F20E877002CA5A0 /* DeepgramWebSocketClient.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038E72F20E877002CA5A0 /* DeepgramWebSocketClient.m */; };
|
||||
04E038E92F20E877002CA5A0 /* DeepgramStreamingManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038E52F20E877002CA5A0 /* DeepgramStreamingManager.m */; };
|
||||
04E038EF2F21F0EC002CA5A0 /* AiVM.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E038EE2F21F0EC002CA5A0 /* AiVM.m */; };
|
||||
04E0394B2F236E75002CA5A0 /* KBChatUserMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 04E0394A2F236E75002CA5A0 /* KBChatUserMessageCell.m */; };
|
||||
@@ -302,6 +291,7 @@
|
||||
A1B2C9262FC9000100000001 /* KBChatMessage.m in Sources */ = {isa = PBXBuildFile; fileRef = A1B2C9212FC9000100000001 /* KBChatMessage.m */; };
|
||||
A1B2C9272FC9000100000001 /* KBChatMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = A1B2C9232FC9000100000001 /* KBChatMessageCell.m */; };
|
||||
A1B2C9282FC9000100000001 /* KBChatPanelView.m in Sources */ = {isa = PBXBuildFile; fileRef = A1B2C9252FC9000100000001 /* KBChatPanelView.m */; };
|
||||
A1B2C9302FCA000100000001 /* KBChatLimitPopView.m in Sources */ = {isa = PBXBuildFile; fileRef = 048FFD232F28A836005D62AE /* KBChatLimitPopView.m */; };
|
||||
A1B2D7022EB8C00100000001 /* KBLangTestVC.m in Sources */ = {isa = PBXBuildFile; fileRef = A1B2D7012EB8C00100000001 /* KBLangTestVC.m */; };
|
||||
A1B2E1012EBC7AAA00000001 /* KBTopThreeView.m in Sources */ = {isa = PBXBuildFile; fileRef = A1B2E0022EBC7AAA00000001 /* KBTopThreeView.m */; };
|
||||
A1B2E1022EBC7AAA00000001 /* HomeHotCell.m in Sources */ = {isa = PBXBuildFile; fileRef = A1B2E0042EBC7AAA00000001 /* HomeHotCell.m */; };
|
||||
@@ -409,26 +399,10 @@
|
||||
046086972F19238500757C95 /* KBAiRecordButton.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAiRecordButton.m; sourceTree = "<group>"; };
|
||||
046086982F19238500757C95 /* KBAiWaveformView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAiWaveformView.h; sourceTree = "<group>"; };
|
||||
046086992F19238500757C95 /* KBAiWaveformView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAiWaveformView.m; sourceTree = "<group>"; };
|
||||
0460869D2F19239B00757C95 /* ASRStreamClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ASRStreamClient.h; sourceTree = "<group>"; };
|
||||
0460869E2F19239B00757C95 /* ASRStreamClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ASRStreamClient.m; sourceTree = "<group>"; };
|
||||
0460869F2F19239B00757C95 /* AudioCaptureManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioCaptureManager.h; sourceTree = "<group>"; };
|
||||
046086A02F19239B00757C95 /* AudioCaptureManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioCaptureManager.m; sourceTree = "<group>"; };
|
||||
046086A12F19239B00757C95 /* AudioSessionManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioSessionManager.h; sourceTree = "<group>"; };
|
||||
046086A22F19239B00757C95 /* AudioSessionManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioSessionManager.m; sourceTree = "<group>"; };
|
||||
046086A32F19239B00757C95 /* AudioStreamPlayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioStreamPlayer.h; sourceTree = "<group>"; };
|
||||
046086A42F19239B00757C95 /* AudioStreamPlayer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioStreamPlayer.m; sourceTree = "<group>"; };
|
||||
046086A52F19239B00757C95 /* ConversationOrchestrator.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ConversationOrchestrator.h; sourceTree = "<group>"; };
|
||||
046086A62F19239B00757C95 /* ConversationOrchestrator.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ConversationOrchestrator.m; sourceTree = "<group>"; };
|
||||
046086A72F19239B00757C95 /* LLMStreamClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = LLMStreamClient.h; sourceTree = "<group>"; };
|
||||
046086A82F19239B00757C95 /* LLMStreamClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = LLMStreamClient.m; sourceTree = "<group>"; };
|
||||
046086A92F19239B00757C95 /* Segmenter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Segmenter.h; sourceTree = "<group>"; };
|
||||
046086AA2F19239B00757C95 /* Segmenter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Segmenter.m; sourceTree = "<group>"; };
|
||||
046086AB2F19239B00757C95 /* SubtitleSync.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SubtitleSync.h; sourceTree = "<group>"; };
|
||||
046086AC2F19239B00757C95 /* SubtitleSync.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SubtitleSync.m; sourceTree = "<group>"; };
|
||||
046086AD2F19239B00757C95 /* TTSPlaybackPipeline.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TTSPlaybackPipeline.h; sourceTree = "<group>"; };
|
||||
046086AE2F19239B00757C95 /* TTSPlaybackPipeline.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TTSPlaybackPipeline.m; sourceTree = "<group>"; };
|
||||
046086AF2F19239B00757C95 /* TTSServiceClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TTSServiceClient.h; sourceTree = "<group>"; };
|
||||
046086B02F19239B00757C95 /* TTSServiceClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TTSServiceClient.m; sourceTree = "<group>"; };
|
||||
046086BB2F1A039F00757C95 /* KBAICommentView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAICommentView.h; sourceTree = "<group>"; };
|
||||
046086BC2F1A039F00757C95 /* KBAICommentView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAICommentView.m; sourceTree = "<group>"; };
|
||||
046086C62F1A092500757C95 /* comments_mock.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = comments_mock.json; sourceTree = "<group>"; };
|
||||
@@ -450,8 +424,6 @@
|
||||
0477BDEF2EBB76E30055D639 /* HomeSheetVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = HomeSheetVC.m; sourceTree = "<group>"; };
|
||||
0477BDF12EBB7B850055D639 /* KBDirectionIndicatorView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBDirectionIndicatorView.h; sourceTree = "<group>"; };
|
||||
0477BDF22EBB7B850055D639 /* KBDirectionIndicatorView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBDirectionIndicatorView.m; sourceTree = "<group>"; };
|
||||
0477BDF52EBC63A80055D639 /* KBTestVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBTestVC.h; sourceTree = "<group>"; };
|
||||
0477BDF62EBC63A80055D639 /* KBTestVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBTestVC.m; sourceTree = "<group>"; };
|
||||
0477BDF82EBC66340055D639 /* HomeHeadView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HomeHeadView.h; sourceTree = "<group>"; };
|
||||
0477BDF92EBC66340055D639 /* HomeHeadView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = HomeHeadView.m; sourceTree = "<group>"; };
|
||||
0477BDFB2EBC6A170055D639 /* HomeHotVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HomeHotVC.h; sourceTree = "<group>"; };
|
||||
@@ -580,9 +552,7 @@
|
||||
048FFD2F2F29F3C3005D62AE /* KBAIMessageZanVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAIMessageZanVC.m; sourceTree = "<group>"; };
|
||||
048FFD312F29F3D2005D62AE /* KBAIMessageChatingVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIMessageChatingVC.h; sourceTree = "<group>"; };
|
||||
048FFD322F29F3D2005D62AE /* KBAIMessageChatingVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAIMessageChatingVC.m; sourceTree = "<group>"; };
|
||||
048FFD342F29F88E005D62AE /* AIMessageVM.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AIMessageVM.h; sourceTree = "<group>"; };
|
||||
048FFD352F29F400005D62AE /* KBAIMessageListVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIMessageListVC.h; sourceTree = "<group>"; };
|
||||
048FFD352F29F88E005D62AE /* AIMessageVM.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AIMessageVM.m; sourceTree = "<group>"; };
|
||||
048FFD362F29F400005D62AE /* KBAIMessageListVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBAIMessageListVC.m; sourceTree = "<group>"; };
|
||||
048FFD372F2A24C5005D62AE /* KBAIChatMessageCacheManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIChatMessageCacheManager.h; sourceTree = "<group>"; };
|
||||
048FFD382F29F410005D62AE /* KBAIMessageCell.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBAIMessageCell.h; sourceTree = "<group>"; };
|
||||
@@ -685,6 +655,12 @@
|
||||
04A9FE1D2EB893F10020DB6D /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/Localizable.strings"; sourceTree = "<group>"; };
|
||||
04B5A1A02EEFA12300AAAAAA /* KBPayProductModel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBPayProductModel.h; sourceTree = "<group>"; };
|
||||
04B5A1A12EEFA12300AAAAAA /* KBPayProductModel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBPayProductModel.m; sourceTree = "<group>"; };
|
||||
04BBF8992F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBKeyboardStressTestVC.h; sourceTree = "<group>"; };
|
||||
04BBF89A2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBKeyboardStressTestVC.m; sourceTree = "<group>"; };
|
||||
04BBF89B2F3ACD8800B1FBB2 /* KBTestVC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBTestVC.h; sourceTree = "<group>"; };
|
||||
04BBF89C2F3ACD8800B1FBB2 /* KBTestVC.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBTestVC.m; sourceTree = "<group>"; };
|
||||
04BBF8FE2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DeepgramWebSocketClient.h; sourceTree = "<group>"; };
|
||||
04BBF8FF2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DeepgramWebSocketClient.m; sourceTree = "<group>"; };
|
||||
04C6EAAC2EAF86530089C901 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
|
||||
04C6EAAD2EAF86530089C901 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
|
||||
04C6EAAE2EAF86530089C901 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
|
||||
@@ -708,15 +684,9 @@
|
||||
04E0383C2F1A7C30002CA5A0 /* KBCustomTabBar.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBCustomTabBar.h; sourceTree = "<group>"; };
|
||||
04E0383D2F1A7C30002CA5A0 /* KBCustomTabBar.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KBCustomTabBar.m; sourceTree = "<group>"; };
|
||||
04E038D72F20BFFB002CA5A0 /* websocket-api.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = "websocket-api.md"; sourceTree = "<group>"; };
|
||||
04E038D92F20C420002CA5A0 /* VoiceChatStreamingManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VoiceChatStreamingManager.h; sourceTree = "<group>"; };
|
||||
04E038DA2F20C420002CA5A0 /* VoiceChatStreamingManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VoiceChatStreamingManager.m; sourceTree = "<group>"; };
|
||||
04E038DB2F20C420002CA5A0 /* VoiceChatWebSocketClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VoiceChatWebSocketClient.h; sourceTree = "<group>"; };
|
||||
04E038DC2F20C420002CA5A0 /* VoiceChatWebSocketClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VoiceChatWebSocketClient.m; sourceTree = "<group>"; };
|
||||
04E038E22F20E500002CA5A0 /* deepgramAPI.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = deepgramAPI.md; sourceTree = "<group>"; };
|
||||
04E038E42F20E877002CA5A0 /* DeepgramStreamingManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DeepgramStreamingManager.h; sourceTree = "<group>"; };
|
||||
04E038E52F20E877002CA5A0 /* DeepgramStreamingManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DeepgramStreamingManager.m; sourceTree = "<group>"; };
|
||||
04E038E62F20E877002CA5A0 /* DeepgramWebSocketClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DeepgramWebSocketClient.h; sourceTree = "<group>"; };
|
||||
04E038E72F20E877002CA5A0 /* DeepgramWebSocketClient.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DeepgramWebSocketClient.m; sourceTree = "<group>"; };
|
||||
04E038ED2F21F0EC002CA5A0 /* AiVM.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AiVM.h; sourceTree = "<group>"; };
|
||||
04E038EE2F21F0EC002CA5A0 /* AiVM.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AiVM.m; sourceTree = "<group>"; };
|
||||
04E039422F236E75002CA5A0 /* KBChatAssistantMessageCell.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KBChatAssistantMessageCell.h; sourceTree = "<group>"; };
|
||||
@@ -1177,42 +1147,20 @@
|
||||
0460866F2F191A5100757C95 /* VM */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
0460869D2F19239B00757C95 /* ASRStreamClient.h */,
|
||||
0460869E2F19239B00757C95 /* ASRStreamClient.m */,
|
||||
0460869F2F19239B00757C95 /* AudioCaptureManager.h */,
|
||||
046086A02F19239B00757C95 /* AudioCaptureManager.m */,
|
||||
046086A12F19239B00757C95 /* AudioSessionManager.h */,
|
||||
046086A22F19239B00757C95 /* AudioSessionManager.m */,
|
||||
046086A32F19239B00757C95 /* AudioStreamPlayer.h */,
|
||||
046086A42F19239B00757C95 /* AudioStreamPlayer.m */,
|
||||
046086A52F19239B00757C95 /* ConversationOrchestrator.h */,
|
||||
046086A62F19239B00757C95 /* ConversationOrchestrator.m */,
|
||||
046086A72F19239B00757C95 /* LLMStreamClient.h */,
|
||||
046086A82F19239B00757C95 /* LLMStreamClient.m */,
|
||||
046086A92F19239B00757C95 /* Segmenter.h */,
|
||||
046086AA2F19239B00757C95 /* Segmenter.m */,
|
||||
046086AB2F19239B00757C95 /* SubtitleSync.h */,
|
||||
046086AC2F19239B00757C95 /* SubtitleSync.m */,
|
||||
046086AD2F19239B00757C95 /* TTSPlaybackPipeline.h */,
|
||||
046086AE2F19239B00757C95 /* TTSPlaybackPipeline.m */,
|
||||
046086AF2F19239B00757C95 /* TTSServiceClient.h */,
|
||||
046086B02F19239B00757C95 /* TTSServiceClient.m */,
|
||||
04E038D92F20C420002CA5A0 /* VoiceChatStreamingManager.h */,
|
||||
04E038DA2F20C420002CA5A0 /* VoiceChatStreamingManager.m */,
|
||||
04E038DB2F20C420002CA5A0 /* VoiceChatWebSocketClient.h */,
|
||||
04E038DC2F20C420002CA5A0 /* VoiceChatWebSocketClient.m */,
|
||||
04E038E42F20E877002CA5A0 /* DeepgramStreamingManager.h */,
|
||||
04E038E52F20E877002CA5A0 /* DeepgramStreamingManager.m */,
|
||||
04BBF8FE2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.h */,
|
||||
04BBF8FF2F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m */,
|
||||
04E0B1002F300001002CA5A0 /* KBVoiceToTextManager.h */,
|
||||
04E0B1012F300001002CA5A0 /* KBVoiceToTextManager.m */,
|
||||
04E0B2002F300002002CA5A0 /* KBVoiceRecordManager.h */,
|
||||
04E0B2012F300002002CA5A0 /* KBVoiceRecordManager.m */,
|
||||
04E038E62F20E877002CA5A0 /* DeepgramWebSocketClient.h */,
|
||||
04E038E72F20E877002CA5A0 /* DeepgramWebSocketClient.m */,
|
||||
04E038ED2F21F0EC002CA5A0 /* AiVM.h */,
|
||||
04E038EE2F21F0EC002CA5A0 /* AiVM.m */,
|
||||
048FFD342F29F88E005D62AE /* AIMessageVM.h */,
|
||||
048FFD352F29F88E005D62AE /* AIMessageVM.m */,
|
||||
);
|
||||
path = VM;
|
||||
sourceTree = "<group>";
|
||||
@@ -1247,12 +1195,14 @@
|
||||
0477BE012EBC6D420055D639 /* FunctionTest */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
0477BDF52EBC63A80055D639 /* KBTestVC.h */,
|
||||
0477BDF62EBC63A80055D639 /* KBTestVC.m */,
|
||||
A1B2D7002EB8C00100000001 /* KBLangTestVC.h */,
|
||||
A1B2D7012EB8C00100000001 /* KBLangTestVC.m */,
|
||||
0459D1B22EBA284C00F2D189 /* KBSkinCenterVC.h */,
|
||||
0459D1B32EBA284C00F2D189 /* KBSkinCenterVC.m */,
|
||||
04BBF8992F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.h */,
|
||||
04BBF89A2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m */,
|
||||
04BBF89B2F3ACD8800B1FBB2 /* KBTestVC.h */,
|
||||
04BBF89C2F3ACD8800B1FBB2 /* KBTestVC.m */,
|
||||
);
|
||||
path = FunctionTest;
|
||||
sourceTree = "<group>";
|
||||
@@ -1586,6 +1536,13 @@
|
||||
path = Localization;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
04BBF8E52F3B50C000B1FBB2 /* KeyboardViewControllerHelp */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
);
|
||||
path = KeyboardViewControllerHelp;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
04C6EAB92EAF86530089C901 /* keyBoard */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
@@ -1619,6 +1576,7 @@
|
||||
04C6EAD42EAF870B0089C901 /* Info.plist */,
|
||||
04C6EAD52EAF870B0089C901 /* KeyboardViewController.h */,
|
||||
04C6EAD62EAF870B0089C901 /* KeyboardViewController.m */,
|
||||
04BBF8E52F3B50C000B1FBB2 /* KeyboardViewControllerHelp */,
|
||||
04C6EADE2EAF8D680089C901 /* PrefixHeader.pch */,
|
||||
04286A0A2ECD88B400CE730C /* KeyboardAssets.xcassets */,
|
||||
);
|
||||
@@ -2386,15 +2344,15 @@
|
||||
0498BD862EE1BEC9006CC1D5 /* KBSignUtils.m in Sources */,
|
||||
04791FFC2ED71D17004E8522 /* UIColor+Extension.m in Sources */,
|
||||
0450AC4A2EF2C3ED00B6AF06 /* KBKeyboardSubscriptionOptionCell.m in Sources */,
|
||||
04A9FE0F2EB481100020DB6D /* KBHUD.m in Sources */,
|
||||
048FFD562F2B9C3D005D62AE /* KBChatAssistantCell.m in Sources */,
|
||||
048FFD572F2B9C3D005D62AE /* KBChatUserCell.m in Sources */,
|
||||
A1B2C9302FCA000100000001 /* KBChatLimitPopView.m in Sources */,
|
||||
04C6EADD2EAF8CEB0089C901 /* KBToolBar.m in Sources */,
|
||||
A1B2C9262FC9000100000001 /* KBChatMessage.m in Sources */,
|
||||
A1B2C9272FC9000100000001 /* KBChatMessageCell.m in Sources */,
|
||||
A1B2C9282FC9000100000001 /* KBChatPanelView.m in Sources */,
|
||||
A1B2C3EB2F20000000000001 /* KBSuggestionBarView.m in Sources */,
|
||||
04A9FE0F2EB481100020DB6D /* KBHUD.m in Sources */,
|
||||
048FFD562F2B9C3D005D62AE /* KBChatAssistantCell.m in Sources */,
|
||||
048FFD572F2B9C3D005D62AE /* KBChatUserCell.m in Sources */,
|
||||
A1B2C9302FCA000100000001 /* KBChatLimitPopView.m in Sources */,
|
||||
04C6EADD2EAF8CEB0089C901 /* KBToolBar.m in Sources */,
|
||||
A1B2C9262FC9000100000001 /* KBChatMessage.m in Sources */,
|
||||
A1B2C9272FC9000100000001 /* KBChatMessageCell.m in Sources */,
|
||||
A1B2C9282FC9000100000001 /* KBChatPanelView.m in Sources */,
|
||||
A1B2C3EB2F20000000000001 /* KBSuggestionBarView.m in Sources */,
|
||||
0419C9662F2C7693002E86D3 /* KBVM.m in Sources */,
|
||||
048FFD512F2B68F7005D62AE /* KBPersonaModel.m in Sources */,
|
||||
04FC95792EB09BC8007BD342 /* KBKeyBoardMainView.m in Sources */,
|
||||
@@ -2538,7 +2496,6 @@
|
||||
0498BD712EE02A41006CC1D5 /* KBForgetPwdNewPwdVC.m in Sources */,
|
||||
048908EF2EBF861800FABA60 /* KBSkinSectionTitleCell.m in Sources */,
|
||||
0450AAE22EF03D5100B6AF06 /* KBPerson.swift in Sources */,
|
||||
04E038E82F20E877002CA5A0 /* DeepgramWebSocketClient.m in Sources */,
|
||||
04E038E92F20E877002CA5A0 /* DeepgramStreamingManager.m in Sources */,
|
||||
04E0B1022F300001002CA5A0 /* KBVoiceToTextManager.m in Sources */,
|
||||
04E0B2022F300002002CA5A0 /* KBVoiceRecordManager.m in Sources */,
|
||||
@@ -2550,18 +2507,16 @@
|
||||
048908D22EBF611D00FABA60 /* KBHistoryMoreCell.m in Sources */,
|
||||
04FC95D82EB1EA16007BD342 /* BaseCell.m in Sources */,
|
||||
0498BD852EE1B255006CC1D5 /* KBSignUtils.m in Sources */,
|
||||
0477BDF72EBC63A80055D639 /* KBTestVC.m in Sources */,
|
||||
04122F7E2EC5FC5500EF7AB3 /* KBJfPayCell.m in Sources */,
|
||||
048FFD502F2B52E7005D62AE /* AIReportVC.m in Sources */,
|
||||
049FB2402EC4B6EF00FAB05D /* KBULBridgeNotification.m in Sources */,
|
||||
04BBF9002F3C97CB00B1FBB2 /* DeepgramWebSocketClient.m in Sources */,
|
||||
04FC95C92EB1E4C9007BD342 /* BaseNavigationController.m in Sources */,
|
||||
048908DD2EBF67EB00FABA60 /* KBSearchResultVC.m in Sources */,
|
||||
05A1B2D12F5B1A2B3C4D5E60 /* KBSearchVM.m in Sources */,
|
||||
05A1B2D22F5B1A2B3C4D5E60 /* KBSearchThemeModel.m in Sources */,
|
||||
047C65102EBCA8DD0035E841 /* HomeRankContentVC.m in Sources */,
|
||||
047C655C2EBCD0F80035E841 /* UIView+KBShadow.m in Sources */,
|
||||
04E038DD2F20C420002CA5A0 /* VoiceChatStreamingManager.m in Sources */,
|
||||
04E038DE2F20C420002CA5A0 /* VoiceChatWebSocketClient.m in Sources */,
|
||||
04F4C0B52F33053800E8F08C /* KBSvipBenefitCell.m in Sources */,
|
||||
04F4C0B62F33053800E8F08C /* KBSvipSubscribeCell.m in Sources */,
|
||||
049FB2262EC3136D00FAB05D /* KBPersonInfoItemCell.m in Sources */,
|
||||
@@ -2572,7 +2527,6 @@
|
||||
04FC95E52EB220B5007BD342 /* UIColor+Extension.m in Sources */,
|
||||
048908E02EBF73DC00FABA60 /* MySkinVC.m in Sources */,
|
||||
04F4C0AA2F32274000E8F08C /* KBPayMainVC.m in Sources */,
|
||||
048FFD362F29F88E005D62AE /* AIMessageVM.m in Sources */,
|
||||
048908F22EC047FD00FABA60 /* KBShopHeadView.m in Sources */,
|
||||
0498BD742EE02E3D006CC1D5 /* KBRegistVerEmailVC.m in Sources */,
|
||||
049FB2292EC31BB000FAB05D /* KBChangeNicknamePopView.m in Sources */,
|
||||
@@ -2595,16 +2549,8 @@
|
||||
048FFD112F27432D005D62AE /* KBPersonaPageModel.m in Sources */,
|
||||
0498BD6B2EE025FC006CC1D5 /* KBForgetPwdVC.m in Sources */,
|
||||
048FFD182F2763A5005D62AE /* KBVoiceInputBar.m in Sources */,
|
||||
046086B12F19239B00757C95 /* SubtitleSync.m in Sources */,
|
||||
046086B22F19239B00757C95 /* TTSServiceClient.m in Sources */,
|
||||
046086B32F19239B00757C95 /* AudioSessionManager.m in Sources */,
|
||||
046086B42F19239B00757C95 /* LLMStreamClient.m in Sources */,
|
||||
046086B52F19239B00757C95 /* Segmenter.m in Sources */,
|
||||
046086B62F19239B00757C95 /* TTSPlaybackPipeline.m in Sources */,
|
||||
046086B72F19239B00757C95 /* ConversationOrchestrator.m in Sources */,
|
||||
046086B82F19239B00757C95 /* ASRStreamClient.m in Sources */,
|
||||
046086B92F19239B00757C95 /* AudioCaptureManager.m in Sources */,
|
||||
046086BA2F19239B00757C95 /* AudioStreamPlayer.m in Sources */,
|
||||
048908FE2EC0CC2400FABA60 /* UIScrollView+KBEmptyView.m in Sources */,
|
||||
0498BD7E2EE04F9C006CC1D5 /* KBTag.m in Sources */,
|
||||
04791F922ED48010004E8522 /* KBNoticeVC.m in Sources */,
|
||||
@@ -2652,6 +2598,8 @@
|
||||
A1B2E1012EBC7AAA00000001 /* KBTopThreeView.m in Sources */,
|
||||
A1B2E1022EBC7AAA00000001 /* HomeHotCell.m in Sources */,
|
||||
048FFD272F28C6CF005D62AE /* KBImagePositionButton.m in Sources */,
|
||||
04BBF89D2F3ACD8800B1FBB2 /* KBKeyboardStressTestVC.m in Sources */,
|
||||
04BBF89E2F3ACD8800B1FBB2 /* KBTestVC.m in Sources */,
|
||||
0459D1B72EBA287900F2D189 /* KBSkinManager.m in Sources */,
|
||||
04286A002ECAEF2B00CE730C /* KBMoneyBtn.m in Sources */,
|
||||
048908F52EC0496400FABA60 /* KBShopItemVC.m in Sources */,
|
||||
|
||||
@@ -57,8 +57,12 @@
|
||||
debugServiceExtension = "internal"
|
||||
allowLocationSimulation = "YES"
|
||||
launchAutomaticallySubstyle = "2">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
<RemoteRunnable
|
||||
runnableDebuggingMode = "0"
|
||||
BundleIdentifier = "com.loveKey.nyx"
|
||||
RemotePath = "/var/containers/Bundle/Application/E51DCFA2-A182-4B31-8A45-BCCF663ADCAA/keyBoard.app">
|
||||
</RemoteRunnable>
|
||||
<MacroExpansion>
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "727EC7522EAF848B00B36487"
|
||||
@@ -66,7 +70,14 @@
|
||||
BlueprintName = "keyBoard"
|
||||
ReferencedContainer = "container:keyBoard.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildableProductRunnable>
|
||||
</MacroExpansion>
|
||||
<EnvironmentVariables>
|
||||
<EnvironmentVariable
|
||||
key = "OS_ACTIVITY_MODE"
|
||||
value = "disable"
|
||||
isEnabled = "NO">
|
||||
</EnvironmentVariable>
|
||||
</EnvironmentVariables>
|
||||
</LaunchAction>
|
||||
<ProfileAction
|
||||
buildConfiguration = "Release"
|
||||
|
||||
@@ -5,12 +5,12 @@
|
||||
"scale" : "1x"
|
||||
},
|
||||
{
|
||||
"filename" : "tab_shequ_selected@2x.png",
|
||||
"filename" : "切图 145@2x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "2x"
|
||||
},
|
||||
{
|
||||
"filename" : "tab_shequ_selected@3x.png",
|
||||
"filename" : "切图 145@3x.png",
|
||||
"idiom" : "universal",
|
||||
"scale" : "3x"
|
||||
}
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 3.7 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 6.9 KiB |
BIN
keyBoard/Assets.xcassets/Tabbar/tab_shequ_selected.imageset/切图 145@2x.png
vendored
Normal file
BIN
keyBoard/Assets.xcassets/Tabbar/tab_shequ_selected.imageset/切图 145@2x.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.5 KiB |
BIN
keyBoard/Assets.xcassets/Tabbar/tab_shequ_selected.imageset/切图 145@3x.png
vendored
Normal file
BIN
keyBoard/Assets.xcassets/Tabbar/tab_shequ_selected.imageset/切图 145@3x.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 6.3 KiB |
@@ -50,6 +50,7 @@
|
||||
@property (nonatomic, strong) UIView *bottomBackgroundView;
|
||||
@property (nonatomic, strong) UIVisualEffectView *bottomBlurEffectView;
|
||||
@property (nonatomic, strong) CAGradientLayer *bottomMaskLayer;
|
||||
@property (nonatomic, strong) CAGradientLayer *bottomGradientLayer;
|
||||
|
||||
/// 语音转写管理器
|
||||
@property (nonatomic, strong) KBVoiceToTextManager *voiceToTextManager;
|
||||
@@ -184,15 +185,15 @@ static NSString * const KBAISelectedPersonaIdKey = @"KBAISelectedPersonaId";
|
||||
|
||||
- (void)viewDidLayoutSubviews {
|
||||
[super viewDidLayoutSubviews];
|
||||
if (self.bottomMaskLayer) {
|
||||
self.bottomMaskLayer.frame = self.bottomBlurEffectView.bounds;
|
||||
if (self.bottomGradientLayer) {
|
||||
self.bottomGradientLayer.frame = self.bottomBackgroundView.bounds;
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - 1:控件初始化
|
||||
|
||||
- (void)setupUI {
|
||||
self.voiceInputBarHeight = 52;
|
||||
self.voiceInputBarHeight = 70;
|
||||
self.baseInputBarBottomSpacing = KB_TABBAR_HEIGHT;
|
||||
[self.view addSubview:self.collectionView];
|
||||
[self.collectionView mas_makeConstraints:^(MASConstraintMaker *make) {
|
||||
@@ -244,7 +245,7 @@ static NSString * const KBAISelectedPersonaIdKey = @"KBAISelectedPersonaId";
|
||||
make.left.equalTo(self.view).offset(12);
|
||||
make.right.equalTo(self.view).offset(-12);
|
||||
self.commentInputBottomConstraint = make.bottom.equalTo(self.view).offset(100); // 初始在屏幕外
|
||||
make.height.mas_equalTo(self.voiceInputBarHeight);
|
||||
make.height.mas_equalTo(52);
|
||||
}];
|
||||
}
|
||||
|
||||
@@ -479,10 +480,10 @@ static NSString * const KBAISelectedPersonaIdKey = @"KBAISelectedPersonaId";
|
||||
[ud setObject:personaDict forKey:@"AppGroup_SelectedPersona"];
|
||||
[ud synchronize];
|
||||
|
||||
NSLog(@"[KBAIHomeVC] 已保存选中的 persona 到 AppGroup: %@, coverImageUrl: %@", persona.name, persona.coverImageUrl);
|
||||
NSLog(@"[KBAIHomeVC] 已保存选中的 persona 到 AppGroup: %@, avatarUrl: %@", persona.name, persona.avatarUrl);
|
||||
|
||||
// 异步下载并缩小图片,保存到 AppGroup 共享目录
|
||||
[self downloadAndSavePersonaCoverImage:persona.coverImageUrl];
|
||||
[self downloadAndSavePersonaCoverImage:persona.avatarUrl];
|
||||
}
|
||||
|
||||
/// 下载并缩小 persona 封面图,保存到 AppGroup 共享目录
|
||||
@@ -510,18 +511,17 @@ static NSString * const KBAISelectedPersonaIdKey = @"KBAISelectedPersonaId";
|
||||
return;
|
||||
}
|
||||
|
||||
// 缩小图片到适合键盘扩展的尺寸(宽度 390,高度按比例)
|
||||
CGFloat targetWidth = 390.0;
|
||||
CGFloat scale = targetWidth / image.size.width;
|
||||
CGSize targetSize = CGSizeMake(targetWidth, image.size.height * scale);
|
||||
// 缩小图片到 40x40(仅用于工具栏头像显示)
|
||||
CGFloat targetSide = 40.0;
|
||||
CGSize targetSize = CGSizeMake(targetSide, targetSide);
|
||||
|
||||
UIGraphicsBeginImageContextWithOptions(targetSize, YES, 1.0);
|
||||
UIGraphicsBeginImageContextWithOptions(targetSize, NO, 1.0);
|
||||
[image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
|
||||
UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
|
||||
UIGraphicsEndImageContext();
|
||||
|
||||
// 压缩为 JPEG,质量 0.6
|
||||
NSData *jpegData = UIImageJPEGRepresentation(scaledImage, 0.6);
|
||||
// 压缩为 JPEG,质量 0.8
|
||||
NSData *jpegData = UIImageJPEGRepresentation(scaledImage, 0.8);
|
||||
if (!jpegData) {
|
||||
NSLog(@"[KBAIHomeVC] 压缩图片失败");
|
||||
return;
|
||||
@@ -891,32 +891,38 @@ static NSString * const KBAISelectedPersonaIdKey = @"KBAISelectedPersonaId";
|
||||
if (!_bottomBackgroundView) {
|
||||
_bottomBackgroundView = [[UIView alloc] init];
|
||||
_bottomBackgroundView.clipsToBounds = YES;
|
||||
// 添加渐变遮罩层,实现从底部到顶部的渐变显示效果
|
||||
_bottomBackgroundView.layer.mask = self.bottomGradientLayer;
|
||||
}
|
||||
return _bottomBackgroundView;
|
||||
}
|
||||
|
||||
- (UIVisualEffectView *)bottomBlurEffectView {
|
||||
if (!_bottomBlurEffectView) {
|
||||
UIBlurEffect *blurEffect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
|
||||
// 使用深色毛玻璃效果
|
||||
UIBlurEffect *blurEffect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleDark];
|
||||
_bottomBlurEffectView = [[UIVisualEffectView alloc] initWithEffect:blurEffect];
|
||||
_bottomBlurEffectView.layer.mask = self.bottomMaskLayer;
|
||||
_bottomBlurEffectView.alpha = 0.9; // 稍微降低整体透明度
|
||||
}
|
||||
return _bottomBlurEffectView;
|
||||
}
|
||||
|
||||
- (CAGradientLayer *)bottomMaskLayer {
|
||||
if (!_bottomMaskLayer) {
|
||||
_bottomMaskLayer = [CAGradientLayer layer];
|
||||
_bottomMaskLayer.startPoint = CGPointMake(0.5, 1);
|
||||
_bottomMaskLayer.endPoint = CGPointMake(0.5, 0);
|
||||
_bottomMaskLayer.colors = @[
|
||||
(__bridge id)[UIColor whiteColor].CGColor,
|
||||
(__bridge id)[UIColor whiteColor].CGColor,
|
||||
(__bridge id)[UIColor clearColor].CGColor
|
||||
- (CAGradientLayer *)bottomGradientLayer {
|
||||
if (!_bottomGradientLayer) {
|
||||
_bottomGradientLayer = [CAGradientLayer layer];
|
||||
// 从底部到顶部
|
||||
_bottomGradientLayer.startPoint = CGPointMake(0.5, 1);
|
||||
_bottomGradientLayer.endPoint = CGPointMake(0.5, 0);
|
||||
// 作为遮罩层:底部完全不透明(白色),顶部完全透明(透明色)
|
||||
// 中间位置开始渐变,让底部区域保持完整的毛玻璃效果
|
||||
_bottomGradientLayer.colors = @[
|
||||
(__bridge id)[UIColor whiteColor].CGColor, // 底部:完全不透明
|
||||
(__bridge id)[UIColor whiteColor].CGColor, // 中间偏下:完全不透明
|
||||
(__bridge id)[[UIColor whiteColor] colorWithAlphaComponent:0.0].CGColor // 顶部:完全透明
|
||||
];
|
||||
_bottomMaskLayer.locations = @[@(0.0), @(0.5), @(1.0)];
|
||||
_bottomGradientLayer.locations = @[@(0.0), @(0.4), @(1.0)];
|
||||
}
|
||||
return _bottomMaskLayer;
|
||||
return _bottomGradientLayer;
|
||||
}
|
||||
|
||||
- (UIButton *)messageButton {
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
//
|
||||
// AIMessageVM.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/28.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@interface AIMessageVM : NSObject
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@@ -1,12 +0,0 @@
|
||||
//
|
||||
// AIMessageVM.m
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/28.
|
||||
//
|
||||
|
||||
#import "AIMessageVM.h"
|
||||
|
||||
@implementation AIMessageVM
|
||||
|
||||
@end
|
||||
@@ -1,51 +0,0 @@
|
||||
//
|
||||
// ASRStreamClient.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
/// ASR 流式识别客户端代理
|
||||
@protocol ASRStreamClientDelegate <NSObject>
|
||||
@required
|
||||
/// 收到实时识别结果(部分文本)
|
||||
- (void)asrClientDidReceivePartialText:(NSString *)text;
|
||||
/// 收到最终识别结果
|
||||
- (void)asrClientDidReceiveFinalText:(NSString *)text;
|
||||
/// 识别失败
|
||||
- (void)asrClientDidFail:(NSError *)error;
|
||||
@end
|
||||
|
||||
/// ASR 流式识别客户端
|
||||
/// 使用 NSURLSessionWebSocketTask 实现流式语音识别
|
||||
@interface ASRStreamClient : NSObject
|
||||
|
||||
@property(nonatomic, weak) id<ASRStreamClientDelegate> delegate;
|
||||
|
||||
/// ASR 服务器 WebSocket URL
|
||||
@property(nonatomic, copy) NSString *serverURL;
|
||||
|
||||
/// 是否已连接
|
||||
@property(nonatomic, assign, readonly, getter=isConnected) BOOL connected;
|
||||
|
||||
/// 开始新的识别会话
|
||||
/// @param sessionId 会话 ID
|
||||
- (void)startWithSessionId:(NSString *)sessionId;
|
||||
|
||||
/// 发送 PCM 音频帧(20ms / 640 bytes)
|
||||
/// @param pcmFrame PCM 数据
|
||||
- (void)sendAudioPCMFrame:(NSData *)pcmFrame;
|
||||
|
||||
/// 结束当前会话,请求最终结果
|
||||
- (void)finalize;
|
||||
|
||||
/// 取消会话
|
||||
- (void)cancel;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@@ -1,271 +0,0 @@
|
||||
//
|
||||
// ASRStreamClient.m
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import "ASRStreamClient.h"
|
||||
#import "AudioCaptureManager.h"
|
||||
|
||||
@interface ASRStreamClient () <NSURLSessionWebSocketDelegate>
|
||||
|
||||
@property(nonatomic, strong) NSURLSession *urlSession;
|
||||
@property(nonatomic, strong) NSURLSessionWebSocketTask *webSocketTask;
|
||||
@property(nonatomic, copy) NSString *currentSessionId;
|
||||
@property(nonatomic, strong) dispatch_queue_t networkQueue;
|
||||
@property(nonatomic, assign) BOOL connected;
|
||||
|
||||
@end
|
||||
|
||||
@implementation ASRStreamClient
|
||||
|
||||
- (instancetype)init {
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_networkQueue = dispatch_queue_create("com.keyboard.aitalk.asr.network",
|
||||
DISPATCH_QUEUE_SERIAL);
|
||||
// TODO: 替换为实际的 ASR 服务器地址
|
||||
_serverURL = @"wss://your-asr-server.com/ws/asr";
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
[self cancelInternal];
|
||||
}
|
||||
|
||||
#pragma mark - Public Methods
|
||||
|
||||
- (void)startWithSessionId:(NSString *)sessionId {
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
[self cancelInternal];
|
||||
|
||||
self.currentSessionId = sessionId;
|
||||
|
||||
// 创建 WebSocket 连接
|
||||
NSURL *url = [NSURL URLWithString:self.serverURL];
|
||||
NSURLSessionConfiguration *config =
|
||||
[NSURLSessionConfiguration defaultSessionConfiguration];
|
||||
config.timeoutIntervalForRequest = 30;
|
||||
config.timeoutIntervalForResource = 300;
|
||||
|
||||
self.urlSession = [NSURLSession sessionWithConfiguration:config
|
||||
delegate:self
|
||||
delegateQueue:nil];
|
||||
|
||||
self.webSocketTask = [self.urlSession webSocketTaskWithURL:url];
|
||||
[self.webSocketTask resume];
|
||||
|
||||
// 发送 start 消息
|
||||
NSDictionary *startMessage = @{
|
||||
@"type" : @"start",
|
||||
@"sessionId" : sessionId,
|
||||
@"format" : @"pcm_s16le",
|
||||
@"sampleRate" : @(kAudioSampleRate),
|
||||
@"channels" : @(kAudioChannels)
|
||||
};
|
||||
|
||||
NSError *jsonError = nil;
|
||||
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:startMessage
|
||||
options:0
|
||||
error:&jsonError];
|
||||
if (jsonError) {
|
||||
[self reportError:jsonError];
|
||||
return;
|
||||
}
|
||||
|
||||
NSString *jsonString = [[NSString alloc] initWithData:jsonData
|
||||
encoding:NSUTF8StringEncoding];
|
||||
NSURLSessionWebSocketMessage *message =
|
||||
[[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
|
||||
|
||||
[self.webSocketTask
|
||||
sendMessage:message
|
||||
completionHandler:^(NSError *_Nullable error) {
|
||||
if (error) {
|
||||
[self reportError:error];
|
||||
} else {
|
||||
self.connected = YES;
|
||||
[self receiveMessage];
|
||||
NSLog(@"[ASRStreamClient] Started session: %@", sessionId);
|
||||
}
|
||||
}];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)sendAudioPCMFrame:(NSData *)pcmFrame {
|
||||
if (!self.connected || !self.webSocketTask) {
|
||||
return;
|
||||
}
|
||||
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
NSURLSessionWebSocketMessage *message =
|
||||
[[NSURLSessionWebSocketMessage alloc] initWithData:pcmFrame];
|
||||
[self.webSocketTask sendMessage:message
|
||||
completionHandler:^(NSError *_Nullable error) {
|
||||
if (error) {
|
||||
NSLog(@"[ASRStreamClient] Failed to send audio frame: %@",
|
||||
error.localizedDescription);
|
||||
}
|
||||
}];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)finalize {
|
||||
if (!self.connected || !self.webSocketTask) {
|
||||
return;
|
||||
}
|
||||
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
NSDictionary *finalizeMessage =
|
||||
@{@"type" : @"finalize", @"sessionId" : self.currentSessionId ?: @""};
|
||||
|
||||
NSError *jsonError = nil;
|
||||
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:finalizeMessage
|
||||
options:0
|
||||
error:&jsonError];
|
||||
if (jsonError) {
|
||||
[self reportError:jsonError];
|
||||
return;
|
||||
}
|
||||
|
||||
NSString *jsonString = [[NSString alloc] initWithData:jsonData
|
||||
encoding:NSUTF8StringEncoding];
|
||||
NSURLSessionWebSocketMessage *message =
|
||||
[[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
|
||||
|
||||
[self.webSocketTask sendMessage:message
|
||||
completionHandler:^(NSError *_Nullable error) {
|
||||
if (error) {
|
||||
[self reportError:error];
|
||||
} else {
|
||||
NSLog(@"[ASRStreamClient] Sent finalize for session: %@",
|
||||
self.currentSessionId);
|
||||
}
|
||||
}];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)cancel {
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
[self cancelInternal];
|
||||
});
|
||||
}
|
||||
|
||||
#pragma mark - Private Methods
|
||||
|
||||
/// Tears down the WebSocket task, the URL session and all per-session
/// state. Must only be called on the network queue.
- (void)cancelInternal {
  self.connected = NO;

  // Messaging nil is a no-op in Objective-C, so the explicit nil checks
  // the original carried are unnecessary.
  [self.webSocketTask cancel];
  self.webSocketTask = nil;

  [self.urlSession invalidateAndCancel];
  self.urlSession = nil;

  self.currentSessionId = nil;
}
|
||||
|
||||
/// Receive loop: pulls one message from the socket and, on success,
/// re-arms itself for the next one. NSURLSessionWebSocketTask delivers
/// exactly one message per -receiveMessageWithCompletionHandler: call,
/// so the recursion is the standard pattern for continuous reading.
- (void)receiveMessage {
  if (!self.webSocketTask) {
    return;
  }

  __weak typeof(self) weakSelf = self;
  [self.webSocketTask receiveMessageWithCompletionHandler:^(
                          NSURLSessionWebSocketMessage *_Nullable message,
                          NSError *_Nullable error) {
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf)
      return;

    if (error) {
      // Ignore errors that correspond to a normal/expected shutdown:
      // 57 is presumably POSIX ENOTCONN ("socket is not connected",
      // raised when the peer closed first) — TODO confirm the domain of
      // this error code; NSURLErrorCancelled is our own -cancel.
      if (error.code != 57 && error.code != NSURLErrorCancelled) {
        [strongSelf reportError:error];
      }
      // Stop the receive loop on any error.
      return;
    }

    // Only text frames carry ASR JSON; binary frames are ignored.
    if (message.type == NSURLSessionWebSocketMessageTypeString) {
      [strongSelf handleTextMessage:message.string];
    }

    // Keep receiving the next message.
    [strongSelf receiveMessage];
  }];
}
|
||||
|
||||
/// Parses one JSON text frame from the ASR server and dispatches it.
/// Supported frame types: "partial" (interim text), "final" (closes the
/// session), "error" (forwarded to the delegate).
/// @param text Raw JSON payload of the WebSocket text message.
- (void)handleTextMessage:(NSString *)text {
  NSData *data = [text dataUsingEncoding:NSUTF8StringEncoding];
  NSError *jsonError = nil;
  id parsed = data ? [NSJSONSerialization JSONObjectWithData:data
                                                     options:0
                                                       error:&jsonError]
                   : nil;

  // JSONObjectWithData: can legally return a top-level array or (with
  // fragment options) a scalar; subscripting those would crash, so
  // reject anything that is not a dictionary.
  if (![parsed isKindOfClass:[NSDictionary class]]) {
    NSLog(@"[ASRStreamClient] Failed to parse message: %@", text);
    return;
  }
  NSDictionary *json = (NSDictionary *)parsed;

  NSString *type = json[@"type"];
  if (![type isKindOfClass:[NSString class]]) {
    // Unknown frame shape; ignore rather than misroute.
    return;
  }

  if ([type isEqualToString:@"partial"]) {
    NSString *partialText =
        [json[@"text"] isKindOfClass:[NSString class]] ? json[@"text"] : @"";
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate
              respondsToSelector:@selector(asrClientDidReceivePartialText:)]) {
        [self.delegate asrClientDidReceivePartialText:partialText];
      }
    });
  } else if ([type isEqualToString:@"final"]) {
    NSString *finalText =
        [json[@"text"] isKindOfClass:[NSString class]] ? json[@"text"] : @"";
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate
              respondsToSelector:@selector(asrClientDidReceiveFinalText:)]) {
        [self.delegate asrClientDidReceiveFinalText:finalText];
      }
    });
    // The final result ends the session: close the connection.
    [self cancelInternal];
  } else if ([type isEqualToString:@"error"]) {
    // Guard against NSNull / missing values: NSNull does not respond to
    // -integerValue and would crash the old implementation.
    id codeValue = json[@"code"];
    NSInteger code = [codeValue respondsToSelector:@selector(integerValue)]
                         ? [codeValue integerValue]
                         : 0;
    NSString *message = [json[@"message"] isKindOfClass:[NSString class]]
                            ? json[@"message"]
                            : @"Unknown error";
    NSError *error =
        [NSError errorWithDomain:@"ASRStreamClient"
                            code:code
                        userInfo:@{NSLocalizedDescriptionKey : message}];
    [self reportError:error];
  }
}
|
||||
|
||||
/// Forwards an error to the delegate, always on the main queue.
/// @param error The failure to report.
- (void)reportError:(NSError *)error {
  dispatch_async(dispatch_get_main_queue(), ^{
    // Guard-clause form: bail out unless the optional delegate method
    // is actually implemented.
    if (![self.delegate respondsToSelector:@selector(asrClientDidFail:)]) {
      return;
    }
    [self.delegate asrClientDidFail:error];
  });
}
|
||||
|
||||
#pragma mark - NSURLSessionWebSocketDelegate
|
||||
|
||||
/// NSURLSessionWebSocketDelegate — the WebSocket handshake completed.
/// Purely informational; `connected` is set when the start frame is acked.
- (void)URLSession:(NSURLSession *)session
          webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
    didOpenWithProtocol:(NSString *)protocol {
  NSLog(@"[ASRStreamClient] WebSocket connected with protocol: %@", protocol);
}
|
||||
|
||||
/// NSURLSessionWebSocketDelegate — the server (or system) closed the socket.
/// Marks the client disconnected so subsequent sends are dropped.
/// NOTE(review): `connected` is written here off the networkQueue; benign
/// for a BOOL flag, but confirm the intended synchronization.
- (void)URLSession:(NSURLSession *)session
       webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
    didCloseWithCode:(NSURLSessionWebSocketCloseCode)closeCode
              reason:(NSData *)reason {
  NSLog(@"[ASRStreamClient] WebSocket closed with code: %ld", (long)closeCode);
  self.connected = NO;
}
|
||||
|
||||
@end
|
||||
@@ -1,63 +0,0 @@
|
||||
//
|
||||
// AudioStreamPlayer.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/// Delegate for streaming-audio playback progress callbacks.
@protocol AudioStreamPlayerDelegate <NSObject>
@optional
/// A segment has started playing.
- (void)audioStreamPlayerDidStartSegment:(NSString *)segmentId;
/// Playback time update for the segment currently playing.
- (void)audioStreamPlayerDidUpdateTime:(NSTimeInterval)time
                             segmentId:(NSString *)segmentId;
/// A segment finished playing.
- (void)audioStreamPlayerDidFinishSegment:(NSString *)segmentId;
@end

/// Streaming PCM player.
/// Uses AVAudioEngine + AVAudioPlayerNode for low-latency playback.
@interface AudioStreamPlayer : NSObject

@property(nonatomic, weak) id<AudioStreamPlayerDelegate> delegate;

/// Whether the playback engine is currently running.
@property(nonatomic, assign, readonly, getter=isPlaying) BOOL playing;

/// Starts the playback engine.
/// @param error Populated on failure.
/// @return YES on success (or if already running).
- (BOOL)start:(NSError **)error;

/// Stops the engine and clears all per-segment bookkeeping.
- (void)stop;

/// Enqueues one chunk of PCM data for playback.
/// @param pcmData Int16 PCM samples.
/// @param sampleRate Sample rate of the chunk in Hz.
/// @param channels Number of channels in the chunk.
/// @param segmentId Identifier of the segment this chunk belongs to.
- (void)enqueuePCMChunk:(NSData *)pcmData
             sampleRate:(double)sampleRate
               channels:(int)channels
              segmentId:(NSString *)segmentId;

/// Current playback time within a segment.
/// @param segmentId Segment identifier.
/// @return Elapsed seconds; 0 when the segment is not the one playing.
- (NSTimeInterval)playbackTimeForSegment:(NSString *)segmentId;

/// Total duration scheduled so far for a segment.
/// @param segmentId Segment identifier.
/// @return Duration in seconds, or 0 if unknown.
- (NSTimeInterval)durationForSegment:(NSString *)segmentId;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,246 +0,0 @@
|
||||
//
|
||||
// AudioStreamPlayer.m
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import "AudioStreamPlayer.h"
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
|
||||
@interface AudioStreamPlayer ()

// Playback graph.
@property(nonatomic, strong) AVAudioEngine *audioEngine;
@property(nonatomic, strong) AVAudioPlayerNode *playerNode;
@property(nonatomic, strong) AVAudioFormat *playbackFormat;

// Per-segment tracking.
@property(nonatomic, copy) NSString *currentSegmentId;
// Accumulated scheduled duration, keyed by segment id (seconds).
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSNumber *> *segmentDurations;
// Wall-clock time (CACurrentMediaTime) at which each segment started.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSNumber *> *segmentStartTimes;
@property(nonatomic, assign) NSUInteger scheduledSamples;
@property(nonatomic, assign) NSUInteger playedSamples;

// State.
@property(nonatomic, assign) BOOL playing;
@property(nonatomic, strong) dispatch_queue_t playerQueue;
@property(nonatomic, strong) NSTimer *progressTimer;

@end
|
||||
|
||||
@implementation AudioStreamPlayer

- (instancetype)init {
  self = [super init];
  if (self) {
    _audioEngine = [[AVAudioEngine alloc] init];
    _playerNode = [[AVAudioPlayerNode alloc] init];
    _segmentDurations = [[NSMutableDictionary alloc] init];
    _segmentStartTimes = [[NSMutableDictionary alloc] init];
    _playerQueue = dispatch_queue_create("com.keyboard.aitalk.streamplayer",
                                         DISPATCH_QUEUE_SERIAL);

    // Default playback format: 16 kHz, mono, Float32, non-interleaved.
    _playbackFormat =
        [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                         sampleRate:16000
                                           channels:1
                                        interleaved:NO];
  }
  return self;
}

- (void)dealloc {
  // Tear down synchronously with direct ivar access. The previous
  // implementation called -stop, which dispatch_async'd a block capturing
  // self — capturing self inside dealloc is undefined behavior under ARC.
  [_progressTimer invalidate];
  [_playerNode stop];
  [_audioEngine stop];
}

#pragma mark - Public Methods

/// Wires the player node into the engine and starts it.
/// Returns YES immediately if already playing.
- (BOOL)start:(NSError **)error {
  if (self.playing) {
    return YES;
  }

  // Connect the player node to the mixer using the default format.
  [self.audioEngine attachNode:self.playerNode];
  [self.audioEngine connect:self.playerNode
                         to:self.audioEngine.mainMixerNode
                     format:self.playbackFormat];

  // Start the engine.
  NSError *startError = nil;
  [self.audioEngine prepare];

  if (![self.audioEngine startAndReturnError:&startError]) {
    if (error) {
      *error = startError;
    }
    NSLog(@"[AudioStreamPlayer] Failed to start engine: %@",
          startError.localizedDescription);
    return NO;
  }

  [self.playerNode play];
  self.playing = YES;

  // Begin emitting progress callbacks at ~30 Hz.
  [self startProgressTimer];

  NSLog(@"[AudioStreamPlayer] Started");
  return YES;
}

/// Stops playback and clears all per-segment bookkeeping. Asynchronous:
/// runs on the player queue so it is serialized with enqueue operations.
- (void)stop {
  dispatch_async(self.playerQueue, ^{
    [self stopProgressTimer];

    [self.playerNode stop];
    [self.audioEngine stop];

    self.playing = NO;
    self.currentSegmentId = nil;
    self.scheduledSamples = 0;
    self.playedSamples = 0;

    [self.segmentDurations removeAllObjects];
    [self.segmentStartTimes removeAllObjects];

    NSLog(@"[AudioStreamPlayer] Stopped");
  });
}

/// Converts one Int16 PCM chunk to Float32 and schedules it on the player
/// node. A segmentId different from the current one starts a new segment.
- (void)enqueuePCMChunk:(NSData *)pcmData
             sampleRate:(double)sampleRate
               channels:(int)channels
              segmentId:(NSString *)segmentId {
  if (!pcmData || pcmData.length == 0 || channels < 1)
    return;

  dispatch_async(self.playerQueue, ^{
    // New segment: reset counters and notify the delegate.
    BOOL isNewSegment = ![segmentId isEqualToString:self.currentSegmentId];
    if (isNewSegment) {
      self.currentSegmentId = segmentId;
      self.scheduledSamples = 0;
      self.segmentStartTimes[segmentId] = @(CACurrentMediaTime());

      dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate respondsToSelector:@selector
                           (audioStreamPlayerDidStartSegment:)]) {
          [self.delegate audioStreamPlayerDidStartSegment:segmentId];
        }
      });
    }

    // Int16 (assumed interleaved) -> Float32 non-interleaved.
    // BUG FIX: the old code treated the total sample count as the frame
    // count and only filled channel 0, which was wrong for channels > 1.
    NSUInteger sampleCount = pcmData.length / sizeof(int16_t);
    NSUInteger frameCount = sampleCount / (NSUInteger)channels;
    if (frameCount == 0)
      return;
    const int16_t *int16Samples = (const int16_t *)pcmData.bytes;

    AVAudioFormat *format =
        [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
                                         sampleRate:sampleRate
                                           channels:channels
                                        interleaved:NO];

    AVAudioPCMBuffer *buffer =
        [[AVAudioPCMBuffer alloc] initWithPCMFormat:format
                                      frameCapacity:(AVAudioFrameCount)frameCount];
    buffer.frameLength = (AVAudioFrameCount)frameCount;

    // Deinterleave into every channel of the buffer.
    for (int c = 0; c < channels; c++) {
      float *dst = buffer.floatChannelData[c];
      for (NSUInteger i = 0; i < frameCount; i++) {
        dst[i] = (float)int16Samples[i * (NSUInteger)channels + c] / 32768.0f;
      }
    }

    // NOTE(review): scheduleBuffer: expects the buffer format to match the
    // node's connection format (16 kHz mono here); a chunk with a different
    // sampleRate/channels may need an AVAudioConverter — confirm against
    // the actual server output.
    __weak typeof(self) weakSelf = self;
    [self.playerNode scheduleBuffer:buffer
                  completionHandler:^{
                    __strong typeof(weakSelf) strongSelf = weakSelf;
                    if (!strongSelf)
                      return;

                    dispatch_async(strongSelf.playerQueue, ^{
                      strongSelf.playedSamples += frameCount;
                    });
                  }];

    self.scheduledSamples += frameCount;

    // Accumulate the segment's scheduled duration (frames / Hz).
    NSTimeInterval chunkDuration = (double)frameCount / sampleRate;
    NSNumber *currentDuration = self.segmentDurations[segmentId];
    self.segmentDurations[segmentId] =
        @(currentDuration.doubleValue + chunkDuration);
  });
}

/// Estimated playback position of a segment, based on frames the node has
/// reported as played. Returns 0 for any segment other than the current one.
- (NSTimeInterval)playbackTimeForSegment:(NSString *)segmentId {
  if (![segmentId isEqualToString:self.currentSegmentId]) {
    return 0;
  }

  return (double)self.playedSamples / self.playbackFormat.sampleRate;
}

/// Total scheduled duration of a segment (seconds), or 0 if unknown.
- (NSTimeInterval)durationForSegment:(NSString *)segmentId {
  NSNumber *duration = self.segmentDurations[segmentId];
  return duration ? duration.doubleValue : 0;
}

#pragma mark - Progress Timer

/// Schedules the ~30 Hz progress timer on the main run loop.
- (void)startProgressTimer {
  dispatch_async(dispatch_get_main_queue(), ^{
    // BUG FIX: the target/selector NSTimer API retains its target, so the
    // old repeating timer kept this player alive forever (dealloc never
    // ran). The block-based API with a weak capture breaks the cycle.
    __weak typeof(self) weakSelf = self;
    self.progressTimer =
        [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0
                                        repeats:YES
                                          block:^(NSTimer *_Nonnull timer) {
                                            [weakSelf updateProgress];
                                          }];
  });
}

/// Invalidates the progress timer on the main run loop.
- (void)stopProgressTimer {
  dispatch_async(dispatch_get_main_queue(), ^{
    [self.progressTimer invalidate];
    self.progressTimer = nil;
  });
}

/// Timer tick: reports the current playback time and detects completion
/// of the current segment. Runs on the main thread.
- (void)updateProgress {
  if (!self.playing || !self.currentSegmentId) {
    return;
  }

  NSTimeInterval currentTime =
      [self playbackTimeForSegment:self.currentSegmentId];
  NSString *segmentId = self.currentSegmentId;

  if ([self.delegate respondsToSelector:@selector
                     (audioStreamPlayerDidUpdateTime:segmentId:)]) {
    [self.delegate audioStreamPlayerDidUpdateTime:currentTime
                                        segmentId:segmentId];
  }

  // Completion check (0.1 s tolerance on the scheduled duration).
  // FIXME(review): once past the threshold this fires on every tick until
  // a new segment starts, so the delegate may receive repeated
  // didFinishSegment: callbacks — consider latching per segment.
  NSTimeInterval duration = [self durationForSegment:segmentId];
  if (duration > 0 && currentTime >= duration - 0.1) {
    dispatch_async(self.playerQueue, ^{
      if ([self.delegate respondsToSelector:@selector
                         (audioStreamPlayerDidFinishSegment:)]) {
        dispatch_async(dispatch_get_main_queue(), ^{
          [self.delegate audioStreamPlayerDidFinishSegment:segmentId];
        });
      }
    });
  }
}

@end
|
||||
@@ -1,88 +0,0 @@
|
||||
//
|
||||
// ConversationOrchestrator.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/// Conversation state machine states.
typedef NS_ENUM(NSInteger, ConversationState) {
  ConversationStateIdle = 0,    // Idle, nothing in flight
  ConversationStateListening,   // Recording user speech
  ConversationStateRecognizing, // Waiting for the final ASR result
  ConversationStateThinking,    // Waiting for the LLM reply
  ConversationStateSpeaking     // Playing back TTS audio
};

/// Conversation orchestrator.
/// Core state machine that wires together audio capture, ASR, LLM and TTS
/// playback, and handles barge-in (interrupting the assistant mid-speech).
@interface ConversationOrchestrator : NSObject

/// Current state.
@property(nonatomic, assign, readonly) ConversationState state;

/// Current conversation ID.
@property(nonatomic, copy, readonly, nullable) NSString *conversationId;

#pragma mark - Callbacks

/// Final recognized user text.
@property(nonatomic, copy, nullable) void (^onUserFinalText)(NSString *text);

/// Assistant text currently visible (typewriter effect).
@property(nonatomic, copy, nullable) void (^onAssistantVisibleText)
    (NSString *text);

/// Full assistant reply text, delivered once generation completes.
@property(nonatomic, copy, nullable) void (^onAssistantFullText)
    (NSString *text);

/// Live partial recognition text.
@property(nonatomic, copy, nullable) void (^onPartialText)(NSString *text);

/// Microphone volume updates (for the waveform UI).
@property(nonatomic, copy, nullable) void (^onVolumeUpdate)(float rms);

/// State-change notifications.
@property(nonatomic, copy, nullable) void (^onStateChange)
    (ConversationState state);

/// Error notifications.
@property(nonatomic, copy, nullable) void (^onError)(NSError *error);

/// The assistant started speaking.
@property(nonatomic, copy, nullable) void (^onSpeakingStart)(void);

/// The assistant finished speaking.
@property(nonatomic, copy, nullable) void (^onSpeakingEnd)(void);

#pragma mark - Configuration

/// ASR server URL.
@property(nonatomic, copy) NSString *asrServerURL;

/// LLM server URL.
@property(nonatomic, copy) NSString *llmServerURL;

/// TTS server URL.
@property(nonatomic, copy) NSString *ttsServerURL;

#pragma mark - User Actions

/// The user pressed the record button.
/// Automatically barges in if the assistant is currently speaking.
- (void)userDidPressRecord;

/// The user released the record button.
- (void)userDidReleaseRecord;

/// Manual stop (e.g. when leaving the page).
- (void)stop;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,532 +0,0 @@
|
||||
//
|
||||
// ConversationOrchestrator.m
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import "ConversationOrchestrator.h"
|
||||
#import "ASRStreamClient.h"
|
||||
#import "AudioCaptureManager.h"
|
||||
#import "AudioSessionManager.h"
|
||||
#import "LLMStreamClient.h"
|
||||
#import "Segmenter.h"
|
||||
#import "SubtitleSync.h"
|
||||
#import "TTSPlaybackPipeline.h"
|
||||
#import "TTSServiceClient.h"
|
||||
|
||||
@interface ConversationOrchestrator () <
    AudioSessionManagerDelegate, AudioCaptureManagerDelegate,
    ASRStreamClientDelegate, LLMStreamClientDelegate, TTSServiceClientDelegate,
    TTSPlaybackPipelineDelegate>

// Collaborating modules, one per pipeline stage.
@property(nonatomic, strong) AudioSessionManager *audioSession;
@property(nonatomic, strong) AudioCaptureManager *audioCapture;
@property(nonatomic, strong) ASRStreamClient *asrClient;
@property(nonatomic, strong) LLMStreamClient *llmClient;
@property(nonatomic, strong) Segmenter *segmenter;
@property(nonatomic, strong) TTSServiceClient *ttsClient;
@property(nonatomic, strong) TTSPlaybackPipeline *playbackPipeline;
@property(nonatomic, strong) SubtitleSync *subtitleSync;

// State (readwrite internally; readonly in the public header).
@property(nonatomic, assign) ConversationState state;
@property(nonatomic, copy) NSString *conversationId;
@property(nonatomic, copy) NSString *currentSessionId;

// Text tracking for the current assistant turn.
@property(nonatomic, strong) NSMutableString *fullAssistantText;
// Maps TTS segment id -> the text spoken in that segment.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSString *> *segmentTextMap;
@property(nonatomic, assign) NSInteger segmentCounter;

// Serial queue that owns all orchestrator state transitions.
@property(nonatomic, strong) dispatch_queue_t orchestratorQueue;

@end
|
||||
|
||||
@implementation ConversationOrchestrator

#pragma mark - Initialization

- (instancetype)init {
  self = [super init];
  if (self) {
    _orchestratorQueue = dispatch_queue_create(
        "com.keyboard.aitalk.orchestrator", DISPATCH_QUEUE_SERIAL);
    _state = ConversationStateIdle;
    _conversationId = [[NSUUID UUID] UUIDString];

    _fullAssistantText = [[NSMutableString alloc] init];
    _segmentTextMap = [[NSMutableDictionary alloc] init];
    _segmentCounter = 0;

    [self setupModules];
  }
  return self;
}

/// Creates and wires every pipeline module, setting self as delegate.
- (void)setupModules {
  // Audio session
  self.audioSession = [AudioSessionManager sharedManager];
  self.audioSession.delegate = self;

  // Audio capture
  self.audioCapture = [[AudioCaptureManager alloc] init];
  self.audioCapture.delegate = self;

  // ASR client
  self.asrClient = [[ASRStreamClient alloc] init];
  self.asrClient.delegate = self;

  // LLM client
  self.llmClient = [[LLMStreamClient alloc] init];
  self.llmClient.delegate = self;

  // Segmenter
  self.segmenter = [[Segmenter alloc] init];

  // TTS client
  self.ttsClient = [[TTSServiceClient alloc] init];
  self.ttsClient.delegate = self;
  // ElevenLabs configuration (proxied through the backend).
  self.ttsClient.voiceId = @"JBFqnCBsd6RMkjVDRZzb"; // default voice "George"
  self.ttsClient.languageCode = @"zh"; // Chinese
  self.ttsClient.expectedPayloadType =
      TTSPayloadTypeURL; // URL mode (simplest)

  // Playback pipeline
  self.playbackPipeline = [[TTSPlaybackPipeline alloc] init];
  self.playbackPipeline.delegate = self;

  // Subtitle sync
  self.subtitleSync = [[SubtitleSync alloc] init];
}

#pragma mark - Configuration Setters

// Each setter mirrors the URL onto the owning client.

- (void)setAsrServerURL:(NSString *)asrServerURL {
  _asrServerURL = [asrServerURL copy];
  self.asrClient.serverURL = asrServerURL;
}

- (void)setLlmServerURL:(NSString *)llmServerURL {
  _llmServerURL = [llmServerURL copy];
  self.llmClient.serverURL = llmServerURL;
}

- (void)setTtsServerURL:(NSString *)ttsServerURL {
  _ttsServerURL = [ttsServerURL copy];
  self.ttsClient.serverURL = ttsServerURL;
}

#pragma mark - User Actions

/// Record button pressed. Interrupts any in-progress assistant turn
/// (barge-in) and starts a new recording, requesting microphone
/// permission first if needed.
- (void)userDidPressRecord {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] userDidPressRecord, current state: %ld",
          (long)self.state);

    // If the assistant is speaking or thinking, barge in.
    if (self.state == ConversationStateSpeaking ||
        self.state == ConversationStateThinking) {
      [self performBargein];
    }

    // Check microphone permission.
    // NOTE(review): a press while already Listening restarts recording
    // with a fresh session — confirm this is intended.
    if (![self.audioSession hasMicrophonePermission]) {
      [self.audioSession requestMicrophonePermission:^(BOOL granted) {
        if (granted) {
          // Hop back onto the orchestrator queue; the permission
          // callback arrives on an unspecified queue.
          dispatch_async(self.orchestratorQueue, ^{
            [self startRecording];
          });
        }
      }];
      return;
    }

    [self startRecording];
  });
}

/// Record button released: stop capture, ask ASR for the final result,
/// and move to the Recognizing state.
- (void)userDidReleaseRecord {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] userDidReleaseRecord, current state: %ld",
          (long)self.state);

    if (self.state != ConversationStateListening) {
      return;
    }

    // Stop capture.
    [self.audioCapture stopCapture];

    // Request the final ASR result.
    [self.asrClient finalize];

    // Update state.
    [self updateState:ConversationStateRecognizing];
  });
}

/// Manual stop: cancels everything and returns to Idle.
- (void)stop {
  dispatch_async(self.orchestratorQueue, ^{
    [self cancelAll];
    [self updateState:ConversationStateIdle];
  });
}

#pragma mark - Private: Recording

/// Configures/activates the audio session, starts a new ASR session and
/// the microphone capture. Must run on the orchestrator queue.
- (void)startRecording {
  // Configure the audio session.
  NSError *error = nil;
  if (![self.audioSession configureForConversation:&error]) {
    [self reportError:error];
    return;
  }

  if (![self.audioSession activateSession:&error]) {
    [self reportError:error];
    return;
  }

  // Generate a fresh session ID.
  self.currentSessionId = [[NSUUID UUID] UUIDString];

  // Start ASR.
  [self.asrClient startWithSessionId:self.currentSessionId];

  // Start audio capture; roll back ASR on failure.
  if (![self.audioCapture startCapture:&error]) {
    [self reportError:error];
    [self.asrClient cancel];
    return;
  }

  // Update state.
  [self updateState:ConversationStateListening];
}

#pragma mark - Private: Barge-in

/// Interrupts the current assistant turn: cancels all in-flight requests,
/// stops playback, and resets per-turn text tracking.
- (void)performBargein {
  NSLog(@"[Orchestrator] Performing barge-in");

  // Cancel everything in flight.
  [self.ttsClient cancel];
  [self.llmClient cancel];
  [self.asrClient cancel];

  // Stop playback.
  [self.playbackPipeline stop];

  // Clear per-turn state.
  [self.segmenter reset];
  [self.segmentTextMap removeAllObjects];
  [self.fullAssistantText setString:@""];
  self.segmentCounter = 0;
}

/// Full teardown: stops capture, cancels every client, stops playback,
/// resets the segmenter and deactivates the audio session.
- (void)cancelAll {
  [self.audioCapture stopCapture];
  [self.asrClient cancel];
  [self.llmClient cancel];
  [self.ttsClient cancel];
  [self.playbackPipeline stop];
  [self.segmenter reset];
  [self.audioSession deactivateSession];
}

#pragma mark - Private: State Management

/// Transitions the state machine and emits onStateChange plus the
/// speaking start/end callbacks on the main queue. No-op on same state.
- (void)updateState:(ConversationState)newState {
  if (self.state == newState)
    return;

  ConversationState oldState = self.state;
  self.state = newState;

  NSLog(@"[Orchestrator] State: %ld -> %ld", (long)oldState, (long)newState);

  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onStateChange) {
      self.onStateChange(newState);
    }

    // Edge-triggered speaking callbacks.
    if (newState == ConversationStateSpeaking &&
        oldState != ConversationStateSpeaking) {
      if (self.onSpeakingStart) {
        self.onSpeakingStart();
      }
    }

    if (oldState == ConversationStateSpeaking &&
        newState != ConversationStateSpeaking) {
      if (self.onSpeakingEnd) {
        self.onSpeakingEnd();
      }
    }
  });
}

/// Surfaces an error through onError on the main queue.
- (void)reportError:(NSError *)error {
  NSLog(@"[Orchestrator] Error: %@", error.localizedDescription);

  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onError) {
      self.onError(error);
    }
  });
}

#pragma mark - AudioCaptureManagerDelegate

/// Each captured PCM frame is forwarded straight to the ASR stream.
- (void)audioCaptureManagerDidOutputPCMFrame:(NSData *)pcmFrame {
  [self.asrClient sendAudioPCMFrame:pcmFrame];
}

/// RMS updates drive the waveform UI on the main queue.
- (void)audioCaptureManagerDidUpdateRMS:(float)rms {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onVolumeUpdate) {
      self.onVolumeUpdate(rms);
    }
  });
}

#pragma mark - AudioSessionManagerDelegate

/// An audio-session interruption (e.g. phone call) aborts the whole turn.
- (void)audioSessionManagerDidInterrupt:(KBAudioSessionInterruptionType)type {
  dispatch_async(self.orchestratorQueue, ^{
    if (type == KBAudioSessionInterruptionTypeBegan) {
      // Interruption began: stop capture and playback.
      [self cancelAll];
      [self updateState:ConversationStateIdle];
    }
  });
}

/// Microphone permission was denied: surface a user-facing error.
- (void)audioSessionManagerMicrophonePermissionDenied {
  NSError *error =
      [NSError errorWithDomain:@"ConversationOrchestrator"
                          code:-1
                      userInfo:@{
                        NSLocalizedDescriptionKey : @"请在设置中开启麦克风权限"
                      }];
  [self reportError:error];
}

#pragma mark - ASRStreamClientDelegate

/// Partial recognition text is forwarded to the UI callback.
- (void)asrClientDidReceivePartialText:(NSString *)text {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (self.onPartialText) {
      self.onPartialText(text);
    }
  });
}

/// Final recognition text: report it, reset per-turn state, start the
/// playback pipeline and kick off the LLM request.
- (void)asrClientDidReceiveFinalText:(NSString *)text {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] ASR final text: %@", text);

    // Deliver the user's text.
    dispatch_async(dispatch_get_main_queue(), ^{
      if (self.onUserFinalText) {
        self.onUserFinalText(text);
      }
    });

    // Empty recognition -> back to idle.
    if (text.length == 0) {
      [self updateState:ConversationStateIdle];
      return;
    }

    // Move to Thinking and start the LLM request.
    [self updateState:ConversationStateThinking];

    // Reset per-turn text tracking.
    [self.fullAssistantText setString:@""];
    [self.segmentTextMap removeAllObjects];
    self.segmentCounter = 0;
    [self.segmenter reset];

    // Start the playback pipeline (failure is logged but not fatal —
    // TTS chunks will simply have nowhere to go).
    NSError *error = nil;
    if (![self.playbackPipeline start:&error]) {
      NSLog(@"[Orchestrator] Failed to start playback pipeline: %@",
            error.localizedDescription);
    }

    // Send the LLM request.
    [self.llmClient sendUserText:text conversationId:self.conversationId];
  });
}

- (void)asrClientDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
    [self updateState:ConversationStateIdle];
  });
}

#pragma mark - LLMStreamClientDelegate

/// Each LLM token is accumulated and fed to the segmenter; completed
/// sentence segments are sent to TTS immediately (streamed speech).
- (void)llmClientDidReceiveToken:(NSString *)token {
  dispatch_async(self.orchestratorQueue, ^{
    // Append to the full reply.
    [self.fullAssistantText appendString:token];

    // Feed the segmenter.
    [self.segmenter appendToken:token];

    // Fire TTS for any segments that became ready.
    NSArray<NSString *> *segments = [self.segmenter popReadySegments];
    for (NSString *segmentText in segments) {
      [self requestTTSForSegment:segmentText];
    }
  });
}

/// Generation finished: flush the trailing segment and deliver the full
/// reply text.
- (void)llmClientDidComplete {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] LLM complete");

    // Flush any remaining (unterminated) segment.
    NSString *remaining = [self.segmenter flushRemainingSegment];
    if (remaining && remaining.length > 0) {
      [self requestTTSForSegment:remaining];
    }

    // Deliver the full reply.
    NSString *fullText = [self.fullAssistantText copy];
    dispatch_async(dispatch_get_main_queue(), ^{
      if (self.onAssistantFullText) {
        self.onAssistantFullText(fullText);
      }
    });
  });
}

- (void)llmClientDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
    [self updateState:ConversationStateIdle];
  });
}

#pragma mark - Private: TTS Request

/// Assigns the next sequential segment id, records the segment's text for
/// subtitle sync, and requests TTS audio for it.
- (void)requestTTSForSegment:(NSString *)segmentText {
  NSString *segmentId =
      [NSString stringWithFormat:@"seg_%ld", (long)self.segmentCounter++];

  // Remember the text so playback-time callbacks can render subtitles.
  self.segmentTextMap[segmentId] = segmentText;

  NSLog(@"[Orchestrator] Requesting TTS for segment %@: %@", segmentId,
        segmentText);

  [self.ttsClient requestTTSForText:segmentText segmentId:segmentId];
}

#pragma mark - TTSServiceClientDelegate

/// URL-mode TTS result: hand the audio URL to the playback pipeline and
/// flip Thinking -> Speaking on the first audio.
- (void)ttsClientDidReceiveURL:(NSURL *)url segmentId:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    [self.playbackPipeline enqueueURL:url segmentId:segmentId];

    if (self.state == ConversationStateThinking) {
      [self updateState:ConversationStateSpeaking];
    }
  });
}

/// Chunked TTS result: same as the URL path but for streamed audio data.
- (void)ttsClientDidReceiveAudioChunk:(NSData *)chunk
                          payloadType:(TTSPayloadType)type
                            segmentId:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    [self.playbackPipeline enqueueChunk:chunk
                            payloadType:type
                              segmentId:segmentId];

    if (self.state == ConversationStateThinking) {
      [self updateState:ConversationStateSpeaking];
    }
  });
}

- (void)ttsClientDidFinishSegment:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    [self.playbackPipeline markSegmentComplete:segmentId];
  });
}

// TTS failure is reported but does not change state; remaining segments
// may still play.
- (void)ttsClientDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
  });
}

#pragma mark - TTSPlaybackPipelineDelegate

- (void)pipelineDidStartSegment:(NSString *)segmentId
                       duration:(NSTimeInterval)duration {
  NSLog(@"[Orchestrator] Started playing segment: %@", segmentId);
}

/// Playback progress: compute how much of the segment's text should be
/// visible and publish it (typewriter effect).
- (void)pipelineDidUpdatePlaybackTime:(NSTimeInterval)time
                            segmentId:(NSString *)segmentId {
  dispatch_async(self.orchestratorQueue, ^{
    // Look up the text spoken in this segment.
    NSString *segmentText = self.segmentTextMap[segmentId];
    if (!segmentText)
      return;

    // Compute the visible prefix from the playback position.
    NSTimeInterval duration =
        [self.playbackPipeline durationForSegment:segmentId];
    NSString *visibleText =
        [self.subtitleSync visibleTextForFullText:segmentText
                                      currentTime:time
                                         duration:duration];

    // TODO: accumulate text from earlier segments for a true full-reply
    // typewriter effect; simplified version shows only the current segment.
    dispatch_async(dispatch_get_main_queue(), ^{
      if (self.onAssistantVisibleText) {
        self.onAssistantVisibleText(visibleText);
      }
    });
  });
}

- (void)pipelineDidFinishSegment:(NSString *)segmentId {
  NSLog(@"[Orchestrator] Finished playing segment: %@", segmentId);
}

/// All queued audio has played: return to Idle and release the session.
- (void)pipelineDidFinishAllSegments {
  dispatch_async(self.orchestratorQueue, ^{
    NSLog(@"[Orchestrator] All segments finished");

    [self updateState:ConversationStateIdle];
    [self.audioSession deactivateSession];
  });
}

- (void)pipelineDidFail:(NSError *)error {
  dispatch_async(self.orchestratorQueue, ^{
    [self reportError:error];
    [self updateState:ConversationStateIdle];
  });
}

@end
|
||||
@@ -1,48 +0,0 @@
|
||||
//
|
||||
// LLMStreamClient.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/15.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN

/// Delegate for the streaming LLM client.
@protocol LLMStreamClientDelegate <NSObject>
@required
/// A new token arrived.
- (void)llmClientDidReceiveToken:(NSString *)token;
/// Generation finished.
- (void)llmClientDidComplete;
/// Generation failed.
- (void)llmClientDidFail:(NSError *)error;
@end

/// Streaming LLM client.
/// Receives the token stream over SSE (Server-Sent Events) or WebSocket.
@interface LLMStreamClient : NSObject

@property(nonatomic, weak) id<LLMStreamClientDelegate> delegate;

/// LLM server URL.
@property(nonatomic, copy) NSString *serverURL;

/// API key (if required by the server).
@property(nonatomic, copy, nullable) NSString *apiKey;

/// Whether a generation request is in flight.
@property(nonatomic, assign, readonly, getter=isGenerating) BOOL generating;

/// Sends the user's text and streams back the LLM reply via the delegate.
/// @param text The user's input text.
/// @param conversationId Conversation identifier for server-side context.
- (void)sendUserText:(NSString *)text conversationId:(NSString *)conversationId;

/// Cancels the in-flight request, if any.
- (void)cancel;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,244 +0,0 @@
|
||||
//
//  LLMStreamClient.m
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "LLMStreamClient.h"

@interface LLMStreamClient () <NSURLSessionDataDelegate>

@property(nonatomic, strong) NSURLSession *urlSession;
@property(nonatomic, strong) NSURLSessionDataTask *dataTask;
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL generating;
// Decoded SSE text waiting for a complete "\n\n"-terminated event.
@property(nonatomic, strong) NSMutableString *buffer;
// Raw bytes waiting for a complete UTF-8 sequence (a network chunk may end
// in the middle of a multi-byte character).
@property(nonatomic, strong) NSMutableData *pendingData;

@end

@implementation LLMStreamClient

- (instancetype)init {
  self = [super init];
  if (self) {
    _networkQueue = dispatch_queue_create("com.keyboard.aitalk.llm.network",
                                          DISPATCH_QUEUE_SERIAL);
    _buffer = [[NSMutableString alloc] init];
    _pendingData = [[NSMutableData alloc] init];
    // TODO: replace with the real LLM server address.
    _serverURL = @"https://your-llm-server.com/api/chat/stream";
  }
  return self;
}

- (void)dealloc {
  // Tear down inline. The previous implementation called -cancel, which
  // dispatch_async's a block capturing self — retaining an object from its
  // own dealloc is undefined behaviour under ARC.
  // NOTE(review): while a request is in flight the NSURLSession retains its
  // delegate (self), so dealloc only runs after cancelInternal invalidated
  // the session — confirm owners call -cancel on teardown.
  [_dataTask cancel];
  [_urlSession invalidateAndCancel];
}

#pragma mark - Public Methods

/// Starts a streaming chat request. Any in-flight request is cancelled first.
- (void)sendUserText:(NSString *)text
      conversationId:(NSString *)conversationId {
  dispatch_async(self.networkQueue, ^{
    [self cancelInternal];

    NSURL *url = [NSURL URLWithString:self.serverURL];
    if (!url) {
      // Guard against a malformed serverURL; the old code would have built
      // a request with a nil URL.
      [self reportError:[NSError errorWithDomain:NSURLErrorDomain
                                            code:NSURLErrorBadURL
                                        userInfo:nil]];
      return;
    }

    self.generating = YES;
    [self.buffer setString:@""];
    [self.pendingData setLength:0];

    // Build the request.
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
    request.HTTPMethod = @"POST";
    [request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
    [request setValue:@"text/event-stream" forHTTPHeaderField:@"Accept"];

    if (self.apiKey) {
      [request setValue:[NSString stringWithFormat:@"Bearer %@", self.apiKey]
          forHTTPHeaderField:@"Authorization"];
    }

    // Request body.
    NSDictionary *body = @{
      @"message" : text,
      @"conversationId" : conversationId,
      @"stream" : @YES
    };

    NSError *jsonError = nil;
    NSData *jsonData = [NSJSONSerialization dataWithJSONObject:body
                                                       options:0
                                                         error:&jsonError];
    if (!jsonData) {
      [self reportError:jsonError];
      return;
    }
    request.HTTPBody = jsonData;

    // One session per request; invalidated in cancelInternal so the
    // delegate reference is released.
    NSURLSessionConfiguration *config =
        [NSURLSessionConfiguration defaultSessionConfiguration];
    config.timeoutIntervalForRequest = 60;
    config.timeoutIntervalForResource = 300;

    self.urlSession = [NSURLSession sessionWithConfiguration:config
                                                    delegate:self
                                               delegateQueue:nil];

    self.dataTask = [self.urlSession dataTaskWithRequest:request];
    [self.dataTask resume];

    NSLog(@"[LLMStreamClient] Started request for conversation: %@",
          conversationId);
  });
}

- (void)cancel {
  dispatch_async(self.networkQueue, ^{
    [self cancelInternal];
  });
}

#pragma mark - Private Methods

/// Cancels the running task, invalidates the session and clears all buffers.
/// Must run on networkQueue. Messaging nil is a no-op, so no guards needed.
- (void)cancelInternal {
  self.generating = NO;

  [self.dataTask cancel];
  self.dataTask = nil;

  [self.urlSession invalidateAndCancel];
  self.urlSession = nil;

  [self.buffer setString:@""];
  [self.pendingData setLength:0];
}

/// Marks the stream failed and notifies the delegate on the main queue.
- (void)reportError:(NSError *)error {
  self.generating = NO;
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector(llmClientDidFail:)]) {
      [self.delegate llmClientDidFail:error];
    }
  });
}

/// Marks the stream complete and notifies the delegate on the main queue.
- (void)reportComplete {
  self.generating = NO;
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector(llmClientDidComplete)]) {
      [self.delegate llmClientDidComplete];
    }
  });
}

/// Forwards one token to the delegate on the main queue.
- (void)reportToken:(NSString *)token {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(llmClientDidReceiveToken:)]) {
      [self.delegate llmClientDidReceiveToken:token];
    }
  });
}

#pragma mark - SSE Parsing

/// Appends a raw network chunk and dispatches every complete SSE event.
- (void)parseSSEData:(NSData *)data {
  [self.pendingData appendData:data];

  // A chunk can end in the middle of a multi-byte UTF-8 sequence, in which
  // case a whole-buffer decode returns nil. Decode the longest valid prefix
  // and keep the (at most 3) trailing bytes for the next chunk; the old
  // code silently dropped the entire chunk when the decode failed.
  NSUInteger totalLength = self.pendingData.length;
  NSString *decoded = nil;
  NSUInteger decodedLength = 0;
  for (NSUInteger trim = 0; trim <= 3 && trim < totalLength; trim++) {
    decodedLength = totalLength - trim;
    decoded = [[NSString alloc] initWithBytes:self.pendingData.bytes
                                       length:decodedLength
                                     encoding:NSUTF8StringEncoding];
    if (decoded)
      break;
  }
  if (!decoded)
    return;  // still not valid UTF-8; wait for more bytes

  [self.pendingData replaceBytesInRange:NSMakeRange(0, decodedLength)
                              withBytes:NULL
                                 length:0];
  [self.buffer appendString:decoded];

  // SSE events are separated by a blank line ("\n\n").
  NSArray *events = [self.buffer componentsSeparatedByString:@"\n\n"];

  // The last component may be an incomplete event; keep it buffered.
  if (events.count > 1) {
    [self.buffer setString:events.lastObject];

    for (NSUInteger i = 0; i < events.count - 1; i++) {
      [self handleSSEEvent:events[i]];
    }
  }
}

/// Handles one complete SSE event (one or more "data: ..." lines).
- (void)handleSSEEvent:(NSString *)event {
  if (event.length == 0)
    return;

  NSArray *lines = [event componentsSeparatedByString:@"\n"];

  for (NSString *line in lines) {
    if ([line hasPrefix:@"data: "]) {
      NSString *dataString = [line substringFromIndex:6];

      // End-of-stream sentinel.
      if ([dataString isEqualToString:@"[DONE]"]) {
        [self reportComplete];
        return;
      }

      NSData *jsonData = [dataString dataUsingEncoding:NSUTF8StringEncoding];
      NSError *jsonError = nil;
      NSDictionary *json = [NSJSONSerialization JSONObjectWithData:jsonData
                                                           options:0
                                                             error:&jsonError];

      if (jsonError) {
        NSLog(@"[LLMStreamClient] Failed to parse SSE data: %@", dataString);
        continue;
      }

      // Extract the token. Supported shapes:
      //   {"token": "..."}                              (custom servers)
      //   {"choices": [{"delta": {"content": "..."}}]}  (OpenAI-style)
      NSString *token = json[@"token"];
      if (!token) {
        NSArray *choices = json[@"choices"];
        if (choices.count > 0) {
          NSDictionary *delta = choices[0][@"delta"];
          token = delta[@"content"];
        }
      }

      if (token.length > 0) {
        [self reportToken:token];
      }
    }
  }
}

#pragma mark - NSURLSessionDataDelegate

- (void)URLSession:(NSURLSession *)session
          dataTask:(NSURLSessionDataTask *)dataTask
    didReceiveData:(NSData *)data {
  // NOTE(review): runs on the session's delegate queue while cancelInternal
  // mutates the same buffers on networkQueue — same behaviour as before,
  // but confirm the two cannot interleave in practice.
  [self parseSSEData:data];
}

- (void)URLSession:(NSURLSession *)session
                    task:(NSURLSessionTask *)task
    didCompleteWithError:(NSError *)error {
  if (error) {
    // Deliberate cancellations (-cancel) are not surfaced as failures.
    // The old check compared the code only; match the domain too so an
    // unrelated error with the same numeric code is not swallowed.
    BOOL cancelled = [error.domain isEqualToString:NSURLErrorDomain] &&
                     error.code == NSURLErrorCancelled;
    if (!cancelled) {
      [self reportError:error];
    }
  } else {
    // Flush whatever is left in the buffer as a final event.
    if (self.buffer.length > 0) {
      [self handleSSEEvent:self.buffer];
      [self.buffer setString:@""];
    }
    [self reportComplete];
  }
}

@end
|
||||
@@ -1,37 +0,0 @@
|
||||
//
//  Segmenter.h
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Sentence segmenter.
/// Splits the LLM token stream into sentence-sized pieces that can be sent
/// to TTS as soon as they are complete.
@interface Segmenter : NSObject

/// Character-count threshold above which a forced split happens even without
/// sentence-ending punctuation. Default: 30.
@property(nonatomic, assign) NSUInteger maxCharacterThreshold;

/// Appends a token to the internal buffer.
/// @param token A token emitted by the LLM.
- (void)appendToken:(NSString *)token;

/// Returns and removes the segments that are ready for TTS.
/// @return Segments that can be synthesized immediately.
- (NSArray<NSString *> *)popReadySegments;

/// Returns whatever text is still buffered (for the final flush).
/// @return The remaining segment, or nil if the buffer is empty or blank.
///         (Marked nullable — the implementation returns nil; the previous
///         declaration was implicitly nonnull inside NS_ASSUME_NONNULL.)
- (nullable NSString *)flushRemainingSegment;

/// Clears all buffered state.
- (void)reset;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,148 +0,0 @@
|
||||
//
//  Segmenter.m
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "Segmenter.h"

@interface Segmenter ()

// Text accumulated from tokens that has not been split off yet.
@property(nonatomic, strong) NSMutableString *buffer;
// Completed segments waiting to be popped.
@property(nonatomic, strong) NSMutableArray<NSString *> *readySegments;

@end

@implementation Segmenter

- (instancetype)init {
  self = [super init];
  if (self) {
    _buffer = [[NSMutableString alloc] init];
    _readySegments = [[NSMutableArray alloc] init];
    _maxCharacterThreshold = 30;
  }
  return self;
}

#pragma mark - Public Methods

- (void)appendToken:(NSString *)token {
  if (token.length == 0) {
    return;
  }

  [self.buffer appendString:token];

  // Split off any segments that became complete with this token.
  [self checkAndSplit];
}

- (NSArray<NSString *> *)popReadySegments {
  NSArray *segments = [self.readySegments copy];
  [self.readySegments removeAllObjects];
  return segments;
}

- (NSString *)flushRemainingSegment {
  NSString *remaining = [self.buffer copy];
  [self.buffer setString:@""];

  remaining = [remaining
      stringByTrimmingCharactersInSet:[NSCharacterSet
                                          whitespaceAndNewlineCharacterSet]];

  // nil (not @"") when nothing meaningful is left, so callers can skip TTS.
  return remaining.length > 0 ? remaining : nil;
}

- (void)reset {
  [self.buffer setString:@""];
  [self.readySegments removeAllObjects];
}

#pragma mark - Private Methods

/// Repeatedly splits the buffer: at sentence-ending punctuation when present,
/// otherwise (once the buffer exceeds the threshold) at the best secondary
/// break point, falling back to a hard split.
- (void)checkAndSplit {
  NSCharacterSet *sentenceEnders =
      [NSCharacterSet characterSetWithCharactersInString:@"。!?\n"];

  while (YES) {
    NSRange range = [self.buffer rangeOfCharacterFromSet:sentenceEnders];

    if (range.location != NSNotFound) {
      // Sentence-ending punctuation found: split right after it.
      [self emitSegmentToIndex:range.location + 1];
    } else if (self.buffer.length > 0 &&
               self.buffer.length >= self.maxCharacterThreshold) {
      // The explicit length > 0 guard prevents an infinite loop when
      // maxCharacterThreshold is 0 (the old code spun forever emitting
      // empty splits in that case).
      NSRange breakRange = [self findBestBreakPoint:self.buffer];

      if (breakRange.location != NSNotFound) {
        // Prefer a comma/space-style break.
        [self emitSegmentToIndex:breakRange.location + 1];
      } else {
        // Hard split at the threshold, extended to the end of the composed
        // character sequence so surrogate pairs / emoji are never cut in
        // half (the old code could split them and emit invalid text).
        NSUInteger splitIndex =
            MIN(MAX(self.maxCharacterThreshold, (NSUInteger)1),
                self.buffer.length) - 1;
        NSRange composed =
            [self.buffer rangeOfComposedCharacterSequenceAtIndex:splitIndex];
        [self emitSegmentToIndex:NSMaxRange(composed)];
      }
    } else {
      // Nothing left to split.
      break;
    }
  }
}

/// Removes buffer[0, endIndex) and queues it (trimmed) if non-blank.
- (void)emitSegmentToIndex:(NSUInteger)endIndex {
  NSString *segment = [self.buffer substringToIndex:endIndex];
  segment = [segment
      stringByTrimmingCharactersInSet:[NSCharacterSet
                                          whitespaceAndNewlineCharacterSet]];

  if (segment.length > 0) {
    [self.readySegments addObject:segment];
  }

  [self.buffer deleteCharactersInRange:NSMakeRange(0, endIndex)];
}

/// Finds the latest comma/space-style break point no earlier than half the
/// threshold, so forced segments stay reasonably long.
- (NSRange)findBestBreakPoint:(NSString *)text {
  NSCharacterSet *breakChars =
      [NSCharacterSet characterSetWithCharactersInString:@",,、;;:: "];

  // Signed arithmetic throughout: the old mixed signed/unsigned comparison
  // promoted i to unsigned, so when maxCharacterThreshold / 2 == 0 the loop
  // could wrap past index 0 and crash in characterAtIndex:.
  NSInteger lowerBound = (NSInteger)(self.maxCharacterThreshold / 2);
  for (NSInteger i = (NSInteger)text.length - 1; i >= lowerBound && i >= 0;
       i--) {
    unichar c = [text characterAtIndex:(NSUInteger)i];
    if ([breakChars characterIsMember:c]) {
      return NSMakeRange((NSUInteger)i, 1);
    }
  }

  return NSMakeRange(NSNotFound, 0);
}

@end
|
||||
@@ -1,36 +0,0 @@
|
||||
//
//  SubtitleSync.h
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Subtitle synchronizer.
/// Maps playback progress onto text so subtitles reveal with a typewriter
/// effect in step with the audio.
@interface SubtitleSync : NSObject

/// Portion of the text that should currently be visible.
/// @param fullText The complete text.
/// @param currentTime Current playback time in seconds.
/// @param duration Total duration in seconds.
/// @return The prefix of fullText that should be shown (typewriter effect).
- (NSString *)visibleTextForFullText:(NSString *)fullText
                         currentTime:(NSTimeInterval)currentTime
                            duration:(NSTimeInterval)duration;

/// Number of characters that should currently be visible.
/// @param fullText The complete text.
/// @param currentTime Current playback time in seconds.
/// @param duration Total duration in seconds.
/// @return Count of visible characters (UTF-16 code units).
- (NSUInteger)visibleCountForFullText:(NSString *)fullText
                          currentTime:(NSTimeInterval)currentTime
                             duration:(NSTimeInterval)duration;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,66 +0,0 @@
|
||||
//
//  SubtitleSync.m
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "SubtitleSync.h"

@implementation SubtitleSync

- (NSString *)visibleTextForFullText:(NSString *)fullText
                         currentTime:(NSTimeInterval)currentTime
                            duration:(NSTimeInterval)duration {
  // length on nil is 0, so this also covers a nil fullText.
  if (fullText.length == 0) {
    return @"";
  }

  NSUInteger visibleCount = [self visibleCountForFullText:fullText
                                              currentTime:currentTime
                                                 duration:duration];

  if (visibleCount >= fullText.length) {
    return fullText;
  }

  // Snap the cut to a composed-character boundary so a surrogate pair or
  // emoji is never split in half — NSString indices count UTF-16 code units,
  // and the old substringToIndex: could land mid-pair.
  NSRange composed =
      [fullText rangeOfComposedCharacterSequenceAtIndex:visibleCount];
  return [fullText substringToIndex:composed.location];
}

- (NSUInteger)visibleCountForFullText:(NSString *)fullText
                          currentTime:(NSTimeInterval)currentTime
                             duration:(NSTimeInterval)duration {
  if (fullText.length == 0) {
    return 0;
  }

  // Without a duration there is no way to pace the reveal; show everything.
  if (duration <= 0) {
    return fullText.length;
  }

  if (currentTime <= 0) {
    return 0;
  }

  if (currentTime >= duration) {
    return fullText.length;
  }

  // Fractional playback progress.
  double progress = currentTime / duration;

  // Run slightly ahead of the audio (5%) so the text never lags the voice.
  double adjustedProgress = MIN(progress * 1.05, 1.0);

  NSUInteger visibleCount =
      (NSUInteger)round(fullText.length * adjustedProgress);

  return MIN(visibleCount, fullText.length);
}

@end
|
||||
@@ -1,79 +0,0 @@
|
||||
//
//  TTSPlaybackPipeline.h
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "TTSServiceClient.h"
#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Playback pipeline delegate. Callbacks are delivered on the main queue.
@protocol TTSPlaybackPipelineDelegate <NSObject>
@optional
/// A segment started playing. `duration` is 0 when unknown (streamed chunks).
- (void)pipelineDidStartSegment:(NSString *)segmentId
                       duration:(NSTimeInterval)duration;
/// Playback time advanced within a segment.
- (void)pipelineDidUpdatePlaybackTime:(NSTimeInterval)time
                            segmentId:(NSString *)segmentId;
/// A single segment finished playing.
- (void)pipelineDidFinishSegment:(NSString *)segmentId;
/// All queued segments finished playing.
- (void)pipelineDidFinishAllSegments;
/// Playback failed.
- (void)pipelineDidFail:(NSError *)error;
@end

/// TTS playback pipeline.
/// Routes audio to the appropriate player based on the payload type.
@interface TTSPlaybackPipeline : NSObject

@property(nonatomic, weak) id<TTSPlaybackPipelineDelegate> delegate;

/// Whether anything is currently playing.
@property(nonatomic, assign, readonly, getter=isPlaying) BOOL playing;

/// Identifier of the segment currently playing, if any.
@property(nonatomic, copy, readonly, nullable) NSString *currentSegmentId;

/// Starts the pipeline.
/// @param error Populated on failure.
/// @return YES if the pipeline started successfully.
- (BOOL)start:(NSError **)error;

/// Stops immediately and drops all queued segments (used for interruptions).
- (void)stop;

/// Queues a whole audio file for playback.
/// @param url Audio file URL.
/// @param segmentId Segment identifier.
- (void)enqueueURL:(NSURL *)url segmentId:(NSString *)segmentId;

/// Queues one chunk of audio data.
/// @param chunk Audio bytes.
/// @param type Payload encoding of the chunk.
/// @param segmentId Segment identifier.
- (void)enqueueChunk:(NSData *)chunk
         payloadType:(TTSPayloadType)type
           segmentId:(NSString *)segmentId;

/// Marks a streamed segment's data as complete.
/// @param segmentId Segment identifier.
- (void)markSegmentComplete:(NSString *)segmentId;

/// Current playback time within a segment.
/// @param segmentId Segment identifier.
/// @return Seconds into the segment, or 0 if it is not playing.
- (NSTimeInterval)currentTimeForSegment:(NSString *)segmentId;

/// Total duration of a segment.
/// @param segmentId Segment identifier.
/// @return Duration in seconds.
- (NSTimeInterval)durationForSegment:(NSString *)segmentId;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,343 +0,0 @@
|
||||
//
//  TTSPlaybackPipeline.m
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "TTSPlaybackPipeline.h"
#import "AudioStreamPlayer.h"
#import <AVFoundation/AVFoundation.h>

@interface TTSPlaybackPipeline () <AudioStreamPlayerDelegate>

// Players: AVPlayer handles whole-file URLs (mode A); AudioStreamPlayer
// handles raw audio chunks (modes B/C/D).
@property(nonatomic, strong) AVPlayer *urlPlayer;
@property(nonatomic, strong) AudioStreamPlayer *streamPlayer;

// URL segments waiting to be played, in order.
@property(nonatomic, strong) NSMutableArray<NSDictionary *> *segmentQueue;
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSNumber *> *segmentDurations;

// Playback state.
@property(nonatomic, assign) BOOL playing;
@property(nonatomic, copy) NSString *currentSegmentId;
@property(nonatomic, strong) id playerTimeObserver;

// Serial queue guarding segmentQueue and playback state.
@property(nonatomic, strong) dispatch_queue_t playbackQueue;

@end

@implementation TTSPlaybackPipeline

- (instancetype)init {
  self = [super init];
  if (self) {
    _segmentQueue = [[NSMutableArray alloc] init];
    _segmentDurations = [[NSMutableDictionary alloc] init];
    _playbackQueue = dispatch_queue_create("com.keyboard.aitalk.playback",
                                           DISPATCH_QUEUE_SERIAL);
  }
  return self;
}

- (void)dealloc {
  // Tear down inline with direct ivar access. The previous implementation
  // called -stop here, which dispatch_async's a block capturing self —
  // retaining an object from its own dealloc is undefined behaviour under
  // ARC.
  if (_playerTimeObserver) {
    [_urlPlayer removeTimeObserver:_playerTimeObserver];
  }
  [_urlPlayer pause];
  [_streamPlayer stop];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}

#pragma mark - Public Methods

/// Lazily creates the chunk stream player and starts it.
- (BOOL)start:(NSError **)error {
  if (!self.streamPlayer) {
    self.streamPlayer = [[AudioStreamPlayer alloc] init];
    self.streamPlayer.delegate = self;
  }

  return [self.streamPlayer start:error];
}

/// Stops immediately and drops all queued segments (barge-in interruption).
- (void)stop {
  dispatch_async(self.playbackQueue, ^{
    // Stop URL playback.
    if (self.urlPlayer) {
      [self.urlPlayer pause];
      if (self.playerTimeObserver) {
        [self.urlPlayer removeTimeObserver:self.playerTimeObserver];
        self.playerTimeObserver = nil;
      }
      // Also drop the did-play-to-end registration for the abandoned item;
      // the old code left it installed.
      [[NSNotificationCenter defaultCenter]
          removeObserver:self
                    name:AVPlayerItemDidPlayToEndTimeNotification
                  object:nil];
      self.urlPlayer = nil;
    }

    // Stop chunked playback.
    [self.streamPlayer stop];

    // Drop everything that was queued.
    [self.segmentQueue removeAllObjects];
    [self.segmentDurations removeAllObjects];

    self.playing = NO;
    self.currentSegmentId = nil;
  });
}

- (void)enqueueURL:(NSURL *)url segmentId:(NSString *)segmentId {
  if (!url || !segmentId)
    return;

  dispatch_async(self.playbackQueue, ^{
    NSDictionary *segment = @{
      @"type" : @(TTSPayloadTypeURL),
      @"url" : url,
      @"segmentId" : segmentId
    };
    [self.segmentQueue addObject:segment];

    // Kick off playback if the pipeline is idle.
    if (!self.playing) {
      [self playNextSegment];
    }
  });
}

- (void)enqueueChunk:(NSData *)chunk
         payloadType:(TTSPayloadType)type
           segmentId:(NSString *)segmentId {
  if (!chunk || !segmentId)
    return;

  dispatch_async(self.playbackQueue, ^{
    switch (type) {
    case TTSPayloadTypePCMChunk:
      // Feed PCM straight into the stream player.
      // NOTE(review): sample rate / channel count are hard-coded — confirm
      // they match the TTS server's output format.
      [self.streamPlayer enqueuePCMChunk:chunk
                              sampleRate:16000
                                channels:1
                               segmentId:segmentId];

      if (!self.playing) {
        self.playing = YES;
        self.currentSegmentId = segmentId;

        dispatch_async(dispatch_get_main_queue(), ^{
          if ([self.delegate respondsToSelector:@selector
                             (pipelineDidStartSegment:duration:)]) {
            // Duration is unknown for streamed chunks; report 0.
            [self.delegate pipelineDidStartSegment:segmentId duration:0];
          }
        });
      }
      break;

    case TTSPayloadTypeAACChunk:
      // TODO: decode AAC -> PCM -> streamPlayer.
      NSLog(@"[TTSPlaybackPipeline] AAC chunk decoding not implemented yet");
      break;

    case TTSPayloadTypeOpusChunk:
      // TODO: decode Opus -> PCM -> streamPlayer.
      NSLog(@"[TTSPlaybackPipeline] Opus chunk decoding not implemented yet");
      break;

    default:
      break;
    }
  });
}

- (void)markSegmentComplete:(NSString *)segmentId {
  // The stream player detects end-of-segment on its own; nothing to do.
}

- (NSTimeInterval)currentTimeForSegment:(NSString *)segmentId {
  if (![segmentId isEqualToString:self.currentSegmentId]) {
    return 0;
  }

  if (self.urlPlayer) {
    return CMTimeGetSeconds(self.urlPlayer.currentTime);
  }

  return [self.streamPlayer playbackTimeForSegment:segmentId];
}

- (NSTimeInterval)durationForSegment:(NSString *)segmentId {
  // Prefer the duration captured when the asset loaded.
  NSNumber *cachedDuration = self.segmentDurations[segmentId];
  if (cachedDuration) {
    return cachedDuration.doubleValue;
  }

  if (self.urlPlayer && [segmentId isEqualToString:self.currentSegmentId]) {
    CMTime itemDuration = self.urlPlayer.currentItem.duration;
    if (CMTIME_IS_VALID(itemDuration)) {
      return CMTimeGetSeconds(itemDuration);
    }
  }

  return [self.streamPlayer durationForSegment:segmentId];
}

#pragma mark - Private Methods

/// Dequeues and starts the next URL segment; must run on playbackQueue.
/// Notifies the delegate when the queue has drained.
- (void)playNextSegment {
  if (self.segmentQueue.count == 0) {
    self.playing = NO;
    self.currentSegmentId = nil;

    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate
              respondsToSelector:@selector(pipelineDidFinishAllSegments)]) {
        [self.delegate pipelineDidFinishAllSegments];
      }
    });
    return;
  }

  NSDictionary *segment = self.segmentQueue.firstObject;
  [self.segmentQueue removeObjectAtIndex:0];

  TTSPayloadType type = [segment[@"type"] integerValue];
  NSString *segmentId = segment[@"segmentId"];

  self.playing = YES;
  self.currentSegmentId = segmentId;

  if (type == TTSPayloadTypeURL) {
    [self playURL:segment[@"url"] segmentId:segmentId];
  }
}

/// Plays one audio file URL with AVPlayer, reporting start/progress events.
- (void)playURL:(NSURL *)url segmentId:(NSString *)segmentId {
  AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url];

  if (!self.urlPlayer) {
    self.urlPlayer = [AVPlayer playerWithPlayerItem:item];
  } else {
    [self.urlPlayer replaceCurrentItemWithPlayerItem:item];
  }

  // Observe end-of-playback for this item.
  [[NSNotificationCenter defaultCenter]
      addObserver:self
         selector:@selector(playerItemDidFinish:)
             name:AVPlayerItemDidPlayToEndTimeNotification
           object:item];

  // Progress callbacks ~30x per second drive the subtitle typewriter.
  __weak typeof(self) weakSelf = self;
  self.playerTimeObserver = [self.urlPlayer
      addPeriodicTimeObserverForInterval:CMTimeMake(1, 30)
                                   queue:dispatch_get_main_queue()
                              usingBlock:^(CMTime time) {
                                __strong typeof(weakSelf) strongSelf =
                                    weakSelf;
                                if (!strongSelf)
                                  return;

                                NSTimeInterval currentTime =
                                    CMTimeGetSeconds(time);
                                if ([strongSelf.delegate
                                        respondsToSelector:@selector
                                        (pipelineDidUpdatePlaybackTime:
                                                             segmentId:)]) {
                                  [strongSelf.delegate
                                      pipelineDidUpdatePlaybackTime:currentTime
                                                          segmentId:segmentId];
                                }
                              }];

  // Load the duration asynchronously, then start playback.
  [item.asset
      loadValuesAsynchronouslyForKeys:@[ @"duration" ]
                    completionHandler:^{
                      dispatch_async(dispatch_get_main_queue(), ^{
                        NSTimeInterval duration =
                            CMTimeGetSeconds(item.duration);
                        if (!isnan(duration)) {
                          self.segmentDurations[segmentId] = @(duration);
                        }

                        if ([self.delegate
                                respondsToSelector:@selector
                                (pipelineDidStartSegment:duration:)]) {
                          [self.delegate pipelineDidStartSegment:segmentId
                                                        duration:duration];
                        }

                        [self.urlPlayer play];
                      });
                    }];
}

/// AVPlayerItemDidPlayToEndTimeNotification handler: cleans up observers,
/// notifies the delegate and advances the queue.
- (void)playerItemDidFinish:(NSNotification *)notification {
  [[NSNotificationCenter defaultCenter]
      removeObserver:self
                name:AVPlayerItemDidPlayToEndTimeNotification
              object:notification.object];

  if (self.playerTimeObserver) {
    [self.urlPlayer removeTimeObserver:self.playerTimeObserver];
    self.playerTimeObserver = nil;
  }

  NSString *finishedSegmentId = self.currentSegmentId;

  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(pipelineDidFinishSegment:)]) {
      [self.delegate pipelineDidFinishSegment:finishedSegmentId];
    }
  });

  dispatch_async(self.playbackQueue, ^{
    [self playNextSegment];
  });
}

#pragma mark - AudioStreamPlayerDelegate

- (void)audioStreamPlayerDidStartSegment:(NSString *)segmentId {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(pipelineDidStartSegment:duration:)]) {
      [self.delegate pipelineDidStartSegment:segmentId duration:0];
    }
  });
}

- (void)audioStreamPlayerDidUpdateTime:(NSTimeInterval)time
                             segmentId:(NSString *)segmentId {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate respondsToSelector:@selector
                       (pipelineDidUpdatePlaybackTime:segmentId:)]) {
      [self.delegate pipelineDidUpdatePlaybackTime:time segmentId:segmentId];
    }
  });
}

- (void)audioStreamPlayerDidFinishSegment:(NSString *)segmentId {
  dispatch_async(dispatch_get_main_queue(), ^{
    if ([self.delegate
            respondsToSelector:@selector(pipelineDidFinishSegment:)]) {
      [self.delegate pipelineDidFinishSegment:segmentId];
    }
  });

  dispatch_async(self.playbackQueue, ^{
    // If nothing else is queued, the pipeline is idle.
    if (self.segmentQueue.count == 0) {
      self.playing = NO;
      self.currentSegmentId = nil;

      dispatch_async(dispatch_get_main_queue(), ^{
        if ([self.delegate
                respondsToSelector:@selector(pipelineDidFinishAllSegments)]) {
          [self.delegate pipelineDidFinishAllSegments];
        }
      });
    }
  });
}

@end
|
||||
@@ -1,66 +0,0 @@
|
||||
//
//  TTSServiceClient.h
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/// Payload shapes a TTS server can return.
typedef NS_ENUM(NSInteger, TTSPayloadType) {
  TTSPayloadTypeURL = 0,   // mode A: an m4a/MP3 file URL
  TTSPayloadTypePCMChunk,  // mode D: raw PCM chunks
  TTSPayloadTypeAACChunk,  // mode B: AAC chunks
  TTSPayloadTypeOpusChunk  // mode C: Opus chunks
};

/// TTS service client delegate.
@protocol TTSServiceClientDelegate <NSObject>
@optional
/// An audio URL arrived (mode A).
- (void)ttsClientDidReceiveURL:(NSURL *)url segmentId:(NSString *)segmentId;
/// An audio data chunk arrived (modes B/C/D).
- (void)ttsClientDidReceiveAudioChunk:(NSData *)chunk
                          payloadType:(TTSPayloadType)type
                            segmentId:(NSString *)segmentId;
/// A segment's data is complete.
- (void)ttsClientDidFinishSegment:(NSString *)segmentId;
/// A request failed.
- (void)ttsClientDidFail:(NSError *)error;
@end

/// TTS service client.
/// Single network-layer interface supporting multiple TTS payload shapes.
@interface TTSServiceClient : NSObject

@property(nonatomic, weak) id<TTSServiceClientDelegate> delegate;

/// TTS server URL.
@property(nonatomic, copy) NSString *serverURL;

/// Voice identifier (ElevenLabs voice ID).
@property(nonatomic, copy) NSString *voiceId;

/// Language code (e.g. "zh", "en").
@property(nonatomic, copy) NSString *languageCode;

/// Payload type the server is expected to return (server-side configuration).
@property(nonatomic, assign) TTSPayloadType expectedPayloadType;

/// Whether a request is in flight.
@property(nonatomic, assign, readonly, getter=isRequesting) BOOL requesting;

/// Requests synthesis of a piece of text.
/// @param text The text to synthesize.
/// @param segmentId Segment identifier (used for matching and ordering).
- (void)requestTTSForText:(NSString *)text segmentId:(NSString *)segmentId;

/// Cancels all outstanding requests.
- (void)cancel;

@end

NS_ASSUME_NONNULL_END
|
||||
@@ -1,302 +0,0 @@
|
||||
//
//  TTSServiceClient.m
//  keyBoard
//
//  Created by Mac on 2026/1/15.
//

#import "TTSServiceClient.h"

@interface TTSServiceClient () <NSURLSessionDataDelegate,
                                NSURLSessionWebSocketDelegate>

@property(nonatomic, strong) NSURLSession *urlSession;
// In-flight tasks keyed by segment ID.
@property(nonatomic, strong)
    NSMutableDictionary<NSString *, NSURLSessionTask *> *activeTasks;
@property(nonatomic, strong) dispatch_queue_t networkQueue;
@property(nonatomic, assign) BOOL requesting;

@end

@implementation TTSServiceClient

- (instancetype)init {
  self = [super init];
  if (self) {
    _networkQueue = dispatch_queue_create("com.keyboard.aitalk.tts.network",
                                          DISPATCH_QUEUE_SERIAL);
    _activeTasks = [[NSMutableDictionary alloc] init];
    _expectedPayloadType = TTSPayloadTypeURL;  // URL mode by default
    // TODO: replace with the real TTS server address.
    _serverURL = @"https://your-tts-server.com/api/tts";

    [self setupSession];
  }
  return self;
}

/// Creates the shared NSURLSession used by all TTS requests.
/// NOTE(review): the session retains its delegate (self), so this object
/// will not deallocate until the session is invalidated — confirm owners
/// either keep this client for the app's lifetime or invalidate on teardown.
- (void)setupSession {
  NSURLSessionConfiguration *config =
      [NSURLSessionConfiguration defaultSessionConfiguration];
  config.timeoutIntervalForRequest = 30;
  config.timeoutIntervalForResource = 120;

  self.urlSession = [NSURLSession sessionWithConfiguration:config
                                                  delegate:self
                                             delegateQueue:nil];
}

- (void)dealloc {
  // Tear down inline. The previous implementation called -cancel, which
  // dispatch_async's a block capturing self — retaining an object from its
  // own dealloc is undefined behaviour under ARC.
  for (NSURLSessionTask *task in _activeTasks.allValues) {
    [task cancel];
  }
  [_urlSession invalidateAndCancel];
}

#pragma mark - Public Methods

/// Dispatches a synthesis request in the mode selected by
/// expectedPayloadType.
- (void)requestTTSForText:(NSString *)text segmentId:(NSString *)segmentId {
  // length on nil is 0, so this also covers a nil text.
  if (text.length == 0 || !segmentId) {
    return;
  }

  dispatch_async(self.networkQueue, ^{
    self.requesting = YES;

    switch (self.expectedPayloadType) {
    case TTSPayloadTypeURL:
      [self requestURLMode:text segmentId:segmentId];
      break;
    case TTSPayloadTypePCMChunk:
    case TTSPayloadTypeAACChunk:
    case TTSPayloadTypeOpusChunk:
      [self requestStreamMode:text segmentId:segmentId];
      break;
    }
  });
}

- (void)cancel {
  dispatch_async(self.networkQueue, ^{
    for (NSURLSessionTask *task in self.activeTasks.allValues) {
      [task cancel];
    }
    [self.activeTasks removeAllObjects];
    self.requesting = NO;
  });
}
|
||||
|
||||
#pragma mark - URL Mode (Mode A)
|
||||
|
||||
- (void)requestURLMode:(NSString *)text segmentId:(NSString *)segmentId {
|
||||
NSURL *url = [NSURL URLWithString:self.serverURL];
|
||||
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
|
||||
request.HTTPMethod = @"POST";
|
||||
[request setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
|
||||
|
||||
NSDictionary *body = @{
|
||||
@"text" : text,
|
||||
@"segmentId" : segmentId,
|
||||
@"voiceId" : self.voiceId ?: @"JBFqnCBsd6RMkjVDRZzb",
|
||||
@"languageCode" : self.languageCode ?: @"zh",
|
||||
@"format" : @"mp3" // 或 m4a
|
||||
};
|
||||
|
||||
NSError *jsonError = nil;
|
||||
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:body
|
||||
options:0
|
||||
error:&jsonError];
|
||||
if (jsonError) {
|
||||
[self reportError:jsonError];
|
||||
return;
|
||||
}
|
||||
request.HTTPBody = jsonData;
|
||||
|
||||
__weak typeof(self) weakSelf = self;
|
||||
NSURLSessionDataTask *task = [self.urlSession
|
||||
dataTaskWithRequest:request
|
||||
completionHandler:^(NSData *_Nullable data,
|
||||
NSURLResponse *_Nullable response,
|
||||
NSError *_Nullable error) {
|
||||
__strong typeof(weakSelf) strongSelf = weakSelf;
|
||||
if (!strongSelf)
|
||||
return;
|
||||
|
||||
dispatch_async(strongSelf.networkQueue, ^{
|
||||
[strongSelf.activeTasks removeObjectForKey:segmentId];
|
||||
|
||||
if (error) {
|
||||
if (error.code != NSURLErrorCancelled) {
|
||||
[strongSelf reportError:error];
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// 解析响应
|
||||
NSError *parseError = nil;
|
||||
NSDictionary *json =
|
||||
[NSJSONSerialization JSONObjectWithData:data
|
||||
options:0
|
||||
error:&parseError];
|
||||
if (parseError) {
|
||||
[strongSelf reportError:parseError];
|
||||
return;
|
||||
}
|
||||
|
||||
NSString *audioURLString = json[@"audioUrl"];
|
||||
if (audioURLString) {
|
||||
NSURL *audioURL = [NSURL URLWithString:audioURLString];
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([strongSelf.delegate respondsToSelector:@selector
|
||||
(ttsClientDidReceiveURL:segmentId:)]) {
|
||||
[strongSelf.delegate ttsClientDidReceiveURL:audioURL
|
||||
segmentId:segmentId];
|
||||
}
|
||||
if ([strongSelf.delegate respondsToSelector:@selector
|
||||
(ttsClientDidFinishSegment:)]) {
|
||||
[strongSelf.delegate ttsClientDidFinishSegment:segmentId];
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}];
|
||||
|
||||
self.activeTasks[segmentId] = task;
|
||||
[task resume];
|
||||
|
||||
NSLog(@"[TTSServiceClient] URL mode request for segment: %@", segmentId);
|
||||
}
|
||||
|
||||
#pragma mark - Stream Mode (Mode B/C/D)
|
||||
|
||||
- (void)requestStreamMode:(NSString *)text segmentId:(NSString *)segmentId {
|
||||
// WebSocket 连接用于流式接收
|
||||
NSString *wsURL =
|
||||
[self.serverURL stringByReplacingOccurrencesOfString:@"https://"
|
||||
withString:@"wss://"];
|
||||
wsURL = [wsURL stringByReplacingOccurrencesOfString:@"http://"
|
||||
withString:@"ws://"];
|
||||
wsURL = [wsURL stringByAppendingString:@"/stream"];
|
||||
|
||||
NSURL *url = [NSURL URLWithString:wsURL];
|
||||
NSURLSessionWebSocketTask *wsTask =
|
||||
[self.urlSession webSocketTaskWithURL:url];
|
||||
|
||||
self.activeTasks[segmentId] = wsTask;
|
||||
[wsTask resume];
|
||||
|
||||
// 发送请求
|
||||
NSDictionary *requestDict = @{
|
||||
@"text" : text,
|
||||
@"segmentId" : segmentId,
|
||||
@"voiceId" : self.voiceId ?: @"JBFqnCBsd6RMkjVDRZzb",
|
||||
@"languageCode" : self.languageCode ?: @"zh",
|
||||
@"format" : [self formatStringForPayloadType:self.expectedPayloadType]
|
||||
};
|
||||
|
||||
NSError *jsonError = nil;
|
||||
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:requestDict
|
||||
options:0
|
||||
error:&jsonError];
|
||||
if (jsonError) {
|
||||
[self reportError:jsonError];
|
||||
return;
|
||||
}
|
||||
|
||||
NSString *jsonString = [[NSString alloc] initWithData:jsonData
|
||||
encoding:NSUTF8StringEncoding];
|
||||
NSURLSessionWebSocketMessage *message =
|
||||
[[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
|
||||
|
||||
__weak typeof(self) weakSelf = self;
|
||||
[wsTask sendMessage:message
|
||||
completionHandler:^(NSError *_Nullable error) {
|
||||
if (error) {
|
||||
[weakSelf reportError:error];
|
||||
} else {
|
||||
[weakSelf receiveStreamMessage:wsTask segmentId:segmentId];
|
||||
}
|
||||
}];
|
||||
|
||||
NSLog(@"[TTSServiceClient] Stream mode request for segment: %@", segmentId);
|
||||
}
|
||||
|
||||
- (void)receiveStreamMessage:(NSURLSessionWebSocketTask *)wsTask
|
||||
segmentId:(NSString *)segmentId {
|
||||
__weak typeof(self) weakSelf = self;
|
||||
[wsTask receiveMessageWithCompletionHandler:^(
|
||||
NSURLSessionWebSocketMessage *_Nullable message,
|
||||
NSError *_Nullable error) {
|
||||
__strong typeof(weakSelf) strongSelf = weakSelf;
|
||||
if (!strongSelf)
|
||||
return;
|
||||
|
||||
if (error) {
|
||||
if (error.code != NSURLErrorCancelled && error.code != 57) {
|
||||
[strongSelf reportError:error];
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type == NSURLSessionWebSocketMessageTypeData) {
|
||||
// 音频数据块
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([strongSelf.delegate respondsToSelector:@selector
|
||||
(ttsClientDidReceiveAudioChunk:
|
||||
payloadType:segmentId:)]) {
|
||||
[strongSelf.delegate
|
||||
ttsClientDidReceiveAudioChunk:message.data
|
||||
payloadType:strongSelf.expectedPayloadType
|
||||
segmentId:segmentId];
|
||||
}
|
||||
});
|
||||
|
||||
// 继续接收
|
||||
[strongSelf receiveStreamMessage:wsTask segmentId:segmentId];
|
||||
} else if (message.type == NSURLSessionWebSocketMessageTypeString) {
|
||||
// 控制消息
|
||||
NSData *data = [message.string dataUsingEncoding:NSUTF8StringEncoding];
|
||||
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data
|
||||
options:0
|
||||
error:nil];
|
||||
|
||||
if ([json[@"type"] isEqualToString:@"done"]) {
|
||||
dispatch_async(strongSelf.networkQueue, ^{
|
||||
[strongSelf.activeTasks removeObjectForKey:segmentId];
|
||||
});
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([strongSelf.delegate
|
||||
respondsToSelector:@selector(ttsClientDidFinishSegment:)]) {
|
||||
[strongSelf.delegate ttsClientDidFinishSegment:segmentId];
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// 继续接收
|
||||
[strongSelf receiveStreamMessage:wsTask segmentId:segmentId];
|
||||
}
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (NSString *)formatStringForPayloadType:(TTSPayloadType)type {
|
||||
switch (type) {
|
||||
case TTSPayloadTypePCMChunk:
|
||||
return @"pcm";
|
||||
case TTSPayloadTypeAACChunk:
|
||||
return @"aac";
|
||||
case TTSPayloadTypeOpusChunk:
|
||||
return @"opus";
|
||||
default:
|
||||
return @"mp3";
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - Error Reporting
|
||||
|
||||
- (void)reportError:(NSError *)error {
|
||||
self.requesting = NO;
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector(ttsClientDidFail:)]) {
|
||||
[self.delegate ttsClientDidFail:error];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@end
|
||||
@@ -1,53 +0,0 @@
|
||||
//
|
||||
// VoiceChatStreamingManager.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/21.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@protocol VoiceChatStreamingManagerDelegate <NSObject>
|
||||
@optional
|
||||
- (void)voiceChatStreamingManagerDidConnect;
|
||||
- (void)voiceChatStreamingManagerDidDisconnect:(NSError *_Nullable)error;
|
||||
- (void)voiceChatStreamingManagerDidStartSession:(NSString *)sessionId;
|
||||
- (void)voiceChatStreamingManagerDidStartTurn:(NSInteger)turnIndex;
|
||||
- (void)voiceChatStreamingManagerDidReceiveEagerEndOfTurnWithTranscript:(NSString *)text
|
||||
confidence:(double)confidence;
|
||||
- (void)voiceChatStreamingManagerDidResumeTurn;
|
||||
- (void)voiceChatStreamingManagerDidUpdateRMS:(float)rms;
|
||||
- (void)voiceChatStreamingManagerDidReceiveInterimTranscript:(NSString *)text;
|
||||
- (void)voiceChatStreamingManagerDidReceiveFinalTranscript:(NSString *)text;
|
||||
- (void)voiceChatStreamingManagerDidReceiveLLMStart;
|
||||
- (void)voiceChatStreamingManagerDidReceiveLLMToken:(NSString *)token;
|
||||
- (void)voiceChatStreamingManagerDidReceiveAudioChunk:(NSData *)audioData;
|
||||
- (void)voiceChatStreamingManagerDidCompleteWithTranscript:(NSString *)transcript
|
||||
aiResponse:(NSString *)aiResponse;
|
||||
- (void)voiceChatStreamingManagerDidFail:(NSError *)error;
|
||||
@end
|
||||
|
||||
/// Manager for realtime recording and streaming.
|
||||
@interface VoiceChatStreamingManager : NSObject
|
||||
|
||||
@property(nonatomic, weak) id<VoiceChatStreamingManagerDelegate> delegate;
|
||||
|
||||
/// Base WebSocket URL, e.g. wss://api.yourdomain.com/api/ws/chat
|
||||
@property(nonatomic, copy) NSString *serverURL;
|
||||
|
||||
@property(nonatomic, assign, readonly, getter=isStreaming) BOOL streaming;
|
||||
@property(nonatomic, copy, readonly, nullable) NSString *sessionId;
|
||||
|
||||
- (void)startWithToken:(NSString *)token
|
||||
language:(nullable NSString *)language
|
||||
voiceId:(nullable NSString *)voiceId;
|
||||
|
||||
- (void)stopAndFinalize;
|
||||
- (void)cancel;
|
||||
- (void)disconnect;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@@ -1,380 +0,0 @@
|
||||
//
|
||||
// VoiceChatStreamingManager.m
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/21.
|
||||
//
|
||||
|
||||
#import "VoiceChatStreamingManager.h"
|
||||
#import "AudioCaptureManager.h"
|
||||
#import "AudioSessionManager.h"
|
||||
#import "VoiceChatWebSocketClient.h"
|
||||
|
||||
static NSString *const kVoiceChatStreamingManagerErrorDomain =
|
||||
@"VoiceChatStreamingManager";
|
||||
|
||||
@interface VoiceChatStreamingManager () <AudioSessionManagerDelegate,
|
||||
AudioCaptureManagerDelegate,
|
||||
VoiceChatWebSocketClientDelegate>
|
||||
|
||||
@property(nonatomic, strong) AudioSessionManager *audioSession;
|
||||
@property(nonatomic, strong) AudioCaptureManager *audioCapture;
|
||||
@property(nonatomic, strong) VoiceChatWebSocketClient *webSocketClient;
|
||||
@property(nonatomic, strong) dispatch_queue_t stateQueue;
|
||||
|
||||
@property(nonatomic, assign) BOOL streaming;
|
||||
@property(nonatomic, copy) NSString *sessionId;
|
||||
|
||||
@property(nonatomic, copy) NSString *pendingToken;
|
||||
@property(nonatomic, copy) NSString *pendingLanguage;
|
||||
@property(nonatomic, copy) NSString *pendingVoiceId;
|
||||
|
||||
@end
|
||||
|
||||
@implementation VoiceChatStreamingManager
|
||||
|
||||
- (instancetype)init {
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_stateQueue = dispatch_queue_create("com.keyboard.aitalk.voicechat.manager",
|
||||
DISPATCH_QUEUE_SERIAL);
|
||||
|
||||
_audioSession = [AudioSessionManager sharedManager];
|
||||
_audioSession.delegate = self;
|
||||
|
||||
_audioCapture = [[AudioCaptureManager alloc] init];
|
||||
_audioCapture.delegate = self;
|
||||
|
||||
_webSocketClient = [[VoiceChatWebSocketClient alloc] init];
|
||||
_webSocketClient.delegate = self;
|
||||
|
||||
_serverURL = @"ws://192.168.2.21:7529/api/ws/chat?token=";
|
||||
_webSocketClient.serverURL = _serverURL;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
[self disconnectInternal];
|
||||
}
|
||||
|
||||
- (void)setServerURL:(NSString *)serverURL {
|
||||
_serverURL = [serverURL copy];
|
||||
self.webSocketClient.serverURL = _serverURL;
|
||||
}
|
||||
|
||||
#pragma mark - Public Methods
|
||||
|
||||
- (void)startWithToken:(NSString *)token
|
||||
language:(nullable NSString *)language
|
||||
voiceId:(nullable NSString *)voiceId {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
self.pendingToken = token ?: @"";
|
||||
self.pendingLanguage = language ?: @"";
|
||||
self.pendingVoiceId = voiceId ?: @"";
|
||||
[self.webSocketClient disableAudioSending];
|
||||
[self startInternal];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)stopAndFinalize {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
if (self.streaming) {
|
||||
[self.audioCapture stopCapture];
|
||||
self.streaming = NO;
|
||||
}
|
||||
[self.webSocketClient disableAudioSending];
|
||||
[self.webSocketClient endAudio];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)cancel {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
if (self.streaming) {
|
||||
[self.audioCapture stopCapture];
|
||||
self.streaming = NO;
|
||||
}
|
||||
[self.webSocketClient disableAudioSending];
|
||||
[self.webSocketClient cancel];
|
||||
self.sessionId = nil;
|
||||
});
|
||||
}
|
||||
|
||||
- (void)disconnect {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
[self disconnectInternal];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)disconnectInternal {
|
||||
if (self.streaming) {
|
||||
[self.audioCapture stopCapture];
|
||||
self.streaming = NO;
|
||||
}
|
||||
[self.webSocketClient disableAudioSending];
|
||||
[self.webSocketClient disconnect];
|
||||
[self.audioSession deactivateSession];
|
||||
self.sessionId = nil;
|
||||
}
|
||||
|
||||
#pragma mark - Private Methods
|
||||
|
||||
- (void)startInternal {
|
||||
if (self.pendingToken.length == 0) {
|
||||
NSLog(@"[VoiceChatStreamingManager] Start failed: token is empty");
|
||||
[self reportErrorWithMessage:@"Token is required"];
|
||||
return;
|
||||
}
|
||||
|
||||
if (![self.audioSession hasMicrophonePermission]) {
|
||||
__weak typeof(self) weakSelf = self;
|
||||
[self.audioSession requestMicrophonePermission:^(BOOL granted) {
|
||||
__strong typeof(weakSelf) strongSelf = weakSelf;
|
||||
if (!strongSelf) {
|
||||
return;
|
||||
}
|
||||
if (!granted) {
|
||||
[strongSelf reportErrorWithMessage:@"Microphone permission denied"];
|
||||
return;
|
||||
}
|
||||
dispatch_async(strongSelf.stateQueue, ^{
|
||||
[strongSelf startInternal];
|
||||
});
|
||||
}];
|
||||
return;
|
||||
}
|
||||
|
||||
NSError *error = nil;
|
||||
if (![self.audioSession configureForConversation:&error]) {
|
||||
[self reportError:error];
|
||||
return;
|
||||
}
|
||||
|
||||
if (![self.audioSession activateSession:&error]) {
|
||||
[self reportError:error];
|
||||
return;
|
||||
}
|
||||
|
||||
if (self.serverURL.length == 0) {
|
||||
NSLog(@"[VoiceChatStreamingManager] Start failed: server URL is empty");
|
||||
[self reportErrorWithMessage:@"Server URL is required"];
|
||||
return;
|
||||
}
|
||||
|
||||
NSLog(@"[VoiceChatStreamingManager] Start streaming, server: %@",
|
||||
self.serverURL);
|
||||
self.webSocketClient.serverURL = self.serverURL;
|
||||
[self.webSocketClient connectWithToken:self.pendingToken];
|
||||
}
|
||||
|
||||
- (void)reportError:(NSError *)error {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidFail:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidFail:error];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)reportErrorWithMessage:(NSString *)message {
|
||||
NSError *error = [NSError errorWithDomain:kVoiceChatStreamingManagerErrorDomain
|
||||
code:-1
|
||||
userInfo:@{
|
||||
NSLocalizedDescriptionKey : message ?: @""
|
||||
}];
|
||||
[self reportError:error];
|
||||
}
|
||||
|
||||
#pragma mark - AudioCaptureManagerDelegate
|
||||
|
||||
- (void)audioCaptureManagerDidOutputPCMFrame:(NSData *)pcmFrame {
|
||||
if (!self.streaming) {
|
||||
return;
|
||||
}
|
||||
[self.webSocketClient sendAudioPCMFrame:pcmFrame];
|
||||
}
|
||||
|
||||
- (void)audioCaptureManagerDidUpdateRMS:(float)rms {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidUpdateRMS:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidUpdateRMS:rms];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#pragma mark - AudioSessionManagerDelegate
|
||||
|
||||
- (void)audioSessionManagerDidInterrupt:(KBAudioSessionInterruptionType)type {
|
||||
if (type == KBAudioSessionInterruptionTypeBegan) {
|
||||
[self cancel];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)audioSessionManagerMicrophonePermissionDenied {
|
||||
[self reportErrorWithMessage:@"Microphone permission denied"];
|
||||
}
|
||||
|
||||
#pragma mark - VoiceChatWebSocketClientDelegate
|
||||
|
||||
- (void)voiceChatClientDidConnect {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
[self.webSocketClient startSessionWithLanguage:self.pendingLanguage
|
||||
voiceId:self.pendingVoiceId];
|
||||
});
|
||||
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidConnect)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidConnect];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidDisconnect:(NSError *_Nullable)error {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
if (self.streaming) {
|
||||
[self.audioCapture stopCapture];
|
||||
self.streaming = NO;
|
||||
}
|
||||
[self.audioSession deactivateSession];
|
||||
self.sessionId = nil;
|
||||
});
|
||||
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidDisconnect:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidDisconnect:error];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidStartSession:(NSString *)sessionId {
|
||||
dispatch_async(self.stateQueue, ^{
|
||||
self.sessionId = sessionId;
|
||||
|
||||
NSError *error = nil;
|
||||
if (![self.audioCapture startCapture:&error]) {
|
||||
[self reportError:error];
|
||||
[self.webSocketClient cancel];
|
||||
return;
|
||||
}
|
||||
|
||||
self.streaming = YES;
|
||||
[self.webSocketClient enableAudioSending];
|
||||
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidStartSession:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidStartSession:sessionId];
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidStartTurn:(NSInteger)turnIndex {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidStartTurn:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidStartTurn:turnIndex];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:(NSString *)text
|
||||
confidence:(double)confidence {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate
|
||||
respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidReceiveEagerEndOfTurnWithTranscript:
|
||||
confidence:)]) {
|
||||
[self.delegate
|
||||
voiceChatStreamingManagerDidReceiveEagerEndOfTurnWithTranscript:text
|
||||
confidence:confidence];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidResumeTurn {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidResumeTurn)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidResumeTurn];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveInterimTranscript:(NSString *)text {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidReceiveInterimTranscript:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidReceiveInterimTranscript:text];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveFinalTranscript:(NSString *)text {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidReceiveFinalTranscript:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidReceiveFinalTranscript:text];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveLLMStart {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidReceiveLLMStart)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidReceiveLLMStart];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveLLMToken:(NSString *)token {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidReceiveLLMToken:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidReceiveLLMToken:token];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveAudioChunk:(NSData *)audioData {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidReceiveAudioChunk:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidReceiveAudioChunk:audioData];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidCompleteWithTranscript:(NSString *)transcript
|
||||
aiResponse:(NSString *)aiResponse {
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
if ([self.delegate respondsToSelector:@selector
|
||||
(voiceChatStreamingManagerDidCompleteWithTranscript:
|
||||
aiResponse:)]) {
|
||||
[self.delegate voiceChatStreamingManagerDidCompleteWithTranscript:transcript
|
||||
aiResponse:aiResponse];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidReceiveErrorCode:(NSString *)code
|
||||
message:(NSString *)message {
|
||||
NSString *desc = message.length > 0 ? message : @"Server error";
|
||||
NSError *error = [NSError errorWithDomain:kVoiceChatStreamingManagerErrorDomain
|
||||
code:-2
|
||||
userInfo:@{
|
||||
NSLocalizedDescriptionKey : desc,
|
||||
@"code" : code ?: @""
|
||||
}];
|
||||
[self reportError:error];
|
||||
}
|
||||
|
||||
- (void)voiceChatClientDidFail:(NSError *)error {
|
||||
[self reportError:error];
|
||||
}
|
||||
|
||||
@end
|
||||
@@ -1,57 +0,0 @@
|
||||
//
|
||||
// VoiceChatWebSocketClient.h
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/21.
|
||||
//
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
|
||||
NS_ASSUME_NONNULL_BEGIN
|
||||
|
||||
@protocol VoiceChatWebSocketClientDelegate <NSObject>
|
||||
@optional
|
||||
- (void)voiceChatClientDidConnect;
|
||||
- (void)voiceChatClientDidDisconnect:(NSError *_Nullable)error;
|
||||
- (void)voiceChatClientDidStartSession:(NSString *)sessionId;
|
||||
- (void)voiceChatClientDidStartTurn:(NSInteger)turnIndex;
|
||||
- (void)voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:(NSString *)text
|
||||
confidence:(double)confidence;
|
||||
- (void)voiceChatClientDidResumeTurn;
|
||||
- (void)voiceChatClientDidReceiveInterimTranscript:(NSString *)text;
|
||||
- (void)voiceChatClientDidReceiveFinalTranscript:(NSString *)text;
|
||||
- (void)voiceChatClientDidReceiveLLMStart;
|
||||
- (void)voiceChatClientDidReceiveLLMToken:(NSString *)token;
|
||||
- (void)voiceChatClientDidReceiveAudioChunk:(NSData *)audioData;
|
||||
- (void)voiceChatClientDidCompleteWithTranscript:(NSString *)transcript
|
||||
aiResponse:(NSString *)aiResponse;
|
||||
- (void)voiceChatClientDidReceiveErrorCode:(NSString *)code
|
||||
message:(NSString *)message;
|
||||
- (void)voiceChatClientDidFail:(NSError *)error;
|
||||
@end
|
||||
|
||||
/// WebSocket client for realtime voice chat.
|
||||
@interface VoiceChatWebSocketClient : NSObject
|
||||
|
||||
@property(nonatomic, weak) id<VoiceChatWebSocketClientDelegate> delegate;
|
||||
|
||||
/// Base WebSocket URL, e.g. wss://api.yourdomain.com/api/ws/chat
|
||||
@property(nonatomic, copy) NSString *serverURL;
|
||||
|
||||
@property(nonatomic, assign, readonly, getter=isConnected) BOOL connected;
|
||||
@property(nonatomic, copy, readonly, nullable) NSString *sessionId;
|
||||
|
||||
- (void)connectWithToken:(NSString *)token;
|
||||
- (void)disconnect;
|
||||
|
||||
- (void)startSessionWithLanguage:(nullable NSString *)language
|
||||
voiceId:(nullable NSString *)voiceId;
|
||||
- (void)enableAudioSending;
|
||||
- (void)disableAudioSending;
|
||||
- (void)sendAudioPCMFrame:(NSData *)pcmFrame;
|
||||
- (void)endAudio;
|
||||
- (void)cancel;
|
||||
|
||||
@end
|
||||
|
||||
NS_ASSUME_NONNULL_END
|
||||
@@ -1,459 +0,0 @@
|
||||
//
|
||||
// VoiceChatWebSocketClient.m
|
||||
// keyBoard
|
||||
//
|
||||
// Created by Mac on 2026/1/21.
|
||||
//
|
||||
|
||||
#import "VoiceChatWebSocketClient.h"
|
||||
|
||||
static NSString *const kVoiceChatWebSocketClientErrorDomain =
|
||||
@"VoiceChatWebSocketClient";
|
||||
|
||||
@interface VoiceChatWebSocketClient () <NSURLSessionWebSocketDelegate>
|
||||
|
||||
@property(nonatomic, strong) NSURLSession *urlSession;
|
||||
@property(nonatomic, strong) NSURLSessionWebSocketTask *webSocketTask;
|
||||
@property(nonatomic, strong) dispatch_queue_t networkQueue;
|
||||
@property(nonatomic, assign) BOOL connected;
|
||||
@property(nonatomic, copy) NSString *sessionId;
|
||||
@property(nonatomic, assign) BOOL audioSendingEnabled;
|
||||
|
||||
@end
|
||||
|
||||
@implementation VoiceChatWebSocketClient
|
||||
|
||||
- (instancetype)init {
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_networkQueue = dispatch_queue_create("com.keyboard.aitalk.voicechat.ws",
|
||||
DISPATCH_QUEUE_SERIAL);
|
||||
_serverURL = @"wss://api.yourdomain.com/api/ws/chat";
|
||||
_audioSendingEnabled = NO;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
[self disconnectInternal];
|
||||
}
|
||||
|
||||
#pragma mark - Public Methods
|
||||
|
||||
- (void)connectWithToken:(NSString *)token {
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
[self disconnectInternal];
|
||||
|
||||
NSURL *url = [self buildURLWithToken:token];
|
||||
if (!url) {
|
||||
[self reportErrorWithMessage:@"Invalid server URL"];
|
||||
return;
|
||||
}
|
||||
|
||||
NSLog(@"[VoiceChatWebSocketClient] Connecting: %@", url.absoluteString);
|
||||
|
||||
NSURLSessionConfiguration *config =
|
||||
[NSURLSessionConfiguration defaultSessionConfiguration];
|
||||
config.timeoutIntervalForRequest = 30;
|
||||
config.timeoutIntervalForResource = 300;
|
||||
|
||||
self.urlSession = [NSURLSession sessionWithConfiguration:config
|
||||
delegate:self
|
||||
delegateQueue:nil];
|
||||
|
||||
self.webSocketTask = [self.urlSession webSocketTaskWithURL:url];
|
||||
[self.webSocketTask resume];
|
||||
[self receiveMessage];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)disconnect {
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
BOOL shouldNotify = self.webSocketTask != nil;
|
||||
if (shouldNotify) {
|
||||
NSLog(@"[VoiceChatWebSocketClient] Disconnect requested");
|
||||
}
|
||||
[self disconnectInternal];
|
||||
if (shouldNotify) {
|
||||
[self notifyDisconnect:nil];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)startSessionWithLanguage:(nullable NSString *)language
|
||||
voiceId:(nullable NSString *)voiceId {
|
||||
NSMutableDictionary *message = [NSMutableDictionary dictionary];
|
||||
message[@"type"] = @"session_start";
|
||||
|
||||
NSMutableDictionary *config = [NSMutableDictionary dictionary];
|
||||
if (language.length > 0) {
|
||||
config[@"language"] = language;
|
||||
}
|
||||
if (voiceId.length > 0) {
|
||||
config[@"voice_id"] = voiceId;
|
||||
}
|
||||
if (config.count > 0) {
|
||||
message[@"config"] = config;
|
||||
}
|
||||
|
||||
NSLog(@"[VoiceChatWebSocketClient] Sending session_start: %@",
|
||||
message);
|
||||
[self sendJSON:message];
|
||||
}
|
||||
|
||||
- (void)enableAudioSending {
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
self.audioSendingEnabled = YES;
|
||||
});
|
||||
}
|
||||
|
||||
- (void)disableAudioSending {
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
self.audioSendingEnabled = NO;
|
||||
});
|
||||
}
|
||||
|
||||
- (void)sendAudioPCMFrame:(NSData *)pcmFrame {
|
||||
if (!self.connected || !self.webSocketTask || pcmFrame.length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
dispatch_async(self.networkQueue, ^{
|
||||
if (!self.audioSendingEnabled) {
|
||||
return;
|
||||
}
|
||||
if (!self.connected || !self.webSocketTask) {
|
||||
return;
|
||||
}
|
||||
NSURLSessionWebSocketMessage *message =
|
||||
[[NSURLSessionWebSocketMessage alloc] initWithData:pcmFrame];
|
||||
[self.webSocketTask
|
||||
sendMessage:message
|
||||
completionHandler:^(NSError *_Nullable error) {
|
||||
if (error) {
|
||||
[self reportError:error];
|
||||
} else {
|
||||
NSLog(@"[VoiceChatWebSocketClient] Sent audio frame: %lu bytes",
|
||||
(unsigned long)pcmFrame.length);
|
||||
}
|
||||
}];
|
||||
});
|
||||
}
|
||||
|
||||
- (void)endAudio {
|
||||
NSLog(@"[VoiceChatWebSocketClient] Sending audio_end");
|
||||
[self sendJSON:@{ @"type" : @"audio_end" }];
|
||||
}
|
||||
|
||||
- (void)cancel {
|
||||
NSLog(@"[VoiceChatWebSocketClient] Sending cancel");
|
||||
[self sendJSON:@{ @"type" : @"cancel" }];
|
||||
}
|
||||
|
||||
#pragma mark - Private Methods
|
||||
|
||||
/// Builds the WebSocket URL from `serverURL`, inserting or replacing a
/// `token` query item when a non-empty token is supplied.
/// Returns nil when the configured server URL is missing or unparseable.
- (NSURL *)buildURLWithToken:(NSString *)token {
  if (self.serverURL.length == 0) {
    return nil;
  }

  NSURLComponents *components =
      [NSURLComponents componentsWithString:self.serverURL];
  if (!components) {
    return nil;
  }

  if (token.length > 0) {
    NSMutableArray<NSURLQueryItem *> *items =
        components.queryItems.mutableCopy ?: [NSMutableArray array];
    NSURLQueryItem *tokenItem = [NSURLQueryItem queryItemWithName:@"token"
                                                            value:token];
    // Replace an existing token item in place; otherwise append one.
    NSUInteger existingIndex = [items
        indexOfObjectPassingTest:^BOOL(NSURLQueryItem *item, NSUInteger idx,
                                       BOOL *stop) {
          return [item.name isEqualToString:@"token"];
        }];
    if (existingIndex == NSNotFound) {
      [items addObject:tokenItem];
    } else {
      items[existingIndex] = tokenItem;
    }
    components.queryItems = items;
  }

  return components.URL;
}
|
||||
|
||||
/// Serializes `dict` to UTF-8 JSON and sends it as a text WebSocket message
/// on the network queue. Failures are forwarded to the delegate via
/// -reportError: / -reportErrorWithMessage:.
- (void)sendJSON:(NSDictionary *)dict {
  if (!self.webSocketTask) {
    return;
  }

  NSError *jsonError = nil;
  NSData *jsonData = [NSJSONSerialization dataWithJSONObject:dict
                                                     options:0
                                                       error:&jsonError];
  // Check the return value rather than the error pointer: Foundation only
  // guarantees the out-error is meaningful when the call actually failed.
  if (!jsonData) {
    if (jsonError) {
      [self reportError:jsonError];
    } else {
      [self reportErrorWithMessage:@"Failed to serialize JSON message"];
    }
    return;
  }

  NSString *jsonString =
      [[NSString alloc] initWithData:jsonData
                            encoding:NSUTF8StringEncoding];
  if (!jsonString) {
    [self reportErrorWithMessage:@"Failed to encode JSON message"];
    return;
  }

  dispatch_async(self.networkQueue, ^{
    NSURLSessionWebSocketMessage *message =
        [[NSURLSessionWebSocketMessage alloc] initWithString:jsonString];
    [self.webSocketTask
            sendMessage:message
      completionHandler:^(NSError *_Nullable error) {
        if (error) {
          [self reportError:error];
        }
      }];
  });
}
|
||||
|
||||
/// Arms a single receive on the socket and re-arms itself after each
/// delivered message, forming the client's receive loop.
- (void)receiveMessage {
  if (!self.webSocketTask) {
    return;
  }

  // Weak/strong dance: the pending completion handler must not keep the
  // client alive after teardown.
  __weak typeof(self) weakSelf = self;
  [self.webSocketTask receiveMessageWithCompletionHandler:^(
                          NSURLSessionWebSocketMessage *_Nullable message,
                          NSError *_Nullable error) {
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf) {
      return;
    }

    if (error) {
      // Cancellation and code 57 are treated as expected during an
      // intentional disconnect and swallowed; anything else is reported as
      // an unexpected drop. Either way, the receive loop ends here.
      // NOTE(review): 57 is presumably POSIX ENOTCONN, but the error domain
      // is not checked — confirm this is the code actually surfaced on
      // deliberate teardown.
      if (error.code != NSURLErrorCancelled && error.code != 57) {
        [strongSelf notifyDisconnect:error];
        [strongSelf disconnectInternal];
      }
      return;
    }

    if (message.type == NSURLSessionWebSocketMessageTypeString) {
      NSLog(@"[VoiceChatWebSocketClient] Received text: %@", message.string);
      [strongSelf handleTextMessage:message.string];
    } else if (message.type == NSURLSessionWebSocketMessageTypeData) {
      NSLog(@"[VoiceChatWebSocketClient] Received binary: %lu bytes",
            (unsigned long)message.data.length);
      [strongSelf handleBinaryMessage:message.data];
    }

    // Re-arm for the next message.
    [strongSelf receiveMessage];
  }];
}
|
||||
|
||||
// Returns json[key] when it is a real string; @"" for missing values,
// NSNull, or any non-string type.
static NSString *KBVoiceStringForKey(NSDictionary *json, NSString *key) {
  id value = json[key];
  return [value isKindOfClass:[NSString class]] ? (NSString *)value : @"";
}

// Returns json[key] as a double, accepting NSNumber or a numeric NSString;
// 0.0 otherwise (missing value, NSNull, wrong type).
static double KBVoiceDoubleForKey(NSDictionary *json, NSString *key) {
  id value = json[key];
  if ([value isKindOfClass:[NSNumber class]] ||
      [value isKindOfClass:[NSString class]]) {
    return [value doubleValue];
  }
  return 0.0;
}

// Integer analogue of KBVoiceDoubleForKey.
static NSInteger KBVoiceIntegerForKey(NSDictionary *json, NSString *key) {
  id value = json[key];
  if ([value isKindOfClass:[NSNumber class]] ||
      [value isKindOfClass:[NSString class]]) {
    return [value integerValue];
  }
  return 0;
}

/// Parses one text frame from the server and dispatches it to the delegate
/// on the main queue. Unknown `type` values are silently ignored.
/// Malformed payloads (non-dictionary JSON, NSNull / wrongly-typed fields)
/// are tolerated instead of crashing keyed subscripting or isEqualToString:.
- (void)handleTextMessage:(NSString *)text {
  if (text.length == 0) {
    return;
  }

  NSData *data = [text dataUsingEncoding:NSUTF8StringEncoding];
  if (!data) {
    return;
  }

  NSError *jsonError = nil;
  // Check the return value, not the error pointer — the out-error is only
  // meaningful when parsing actually failed.
  id parsed = [NSJSONSerialization JSONObjectWithData:data
                                              options:0
                                                error:&jsonError];
  if (!parsed) {
    if (jsonError) {
      [self reportError:jsonError];
    }
    return;
  }
  // A top-level array (or any non-dictionary payload) would throw on the
  // keyed subscripting below — drop it instead.
  if (![parsed isKindOfClass:[NSDictionary class]]) {
    return;
  }
  NSDictionary *json = (NSDictionary *)parsed;

  NSString *type = KBVoiceStringForKey(json, @"type");
  if (type.length == 0) {
    return;
  }

  if ([type isEqualToString:@"session_started"]) {
    NSString *sessionId = KBVoiceStringForKey(json, @"session_id");
    self.sessionId = sessionId;
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidStartSession:)]) {
        [self.delegate voiceChatClientDidStartSession:sessionId];
      }
    });
  } else if ([type isEqualToString:@"transcript_interim"]) {
    NSString *transcript = KBVoiceStringForKey(json, @"text");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidReceiveInterimTranscript:)]) {
        [self.delegate voiceChatClientDidReceiveInterimTranscript:transcript];
      }
    });
  } else if ([type isEqualToString:@"transcript_final"]) {
    NSString *transcript = KBVoiceStringForKey(json, @"text");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidReceiveFinalTranscript:)]) {
        [self.delegate voiceChatClientDidReceiveFinalTranscript:transcript];
      }
    });
  } else if ([type isEqualToString:@"turn_start"]) {
    NSInteger turnIndex = KBVoiceIntegerForKey(json, @"turn_index");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidStartTurn:)]) {
        [self.delegate voiceChatClientDidStartTurn:turnIndex];
      }
    });
  } else if ([type isEqualToString:@"eager_eot"]) {
    NSString *transcript = KBVoiceStringForKey(json, @"transcript");
    double confidence = KBVoiceDoubleForKey(json, @"confidence");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:
                                                                    confidence:)]) {
        [self.delegate
            voiceChatClientDidReceiveEagerEndOfTurnWithTranscript:transcript
                                                       confidence:confidence];
      }
    });
  } else if ([type isEqualToString:@"turn_resumed"]) {
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidResumeTurn)]) {
        [self.delegate voiceChatClientDidResumeTurn];
      }
    });
  } else if ([type isEqualToString:@"llm_start"]) {
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate
              respondsToSelector:@selector(voiceChatClientDidReceiveLLMStart)]) {
        [self.delegate voiceChatClientDidReceiveLLMStart];
      }
    });
  } else if ([type isEqualToString:@"llm_token"]) {
    NSString *token = KBVoiceStringForKey(json, @"token");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate
              respondsToSelector:@selector(voiceChatClientDidReceiveLLMToken:)]) {
        [self.delegate voiceChatClientDidReceiveLLMToken:token];
      }
    });
  } else if ([type isEqualToString:@"complete"]) {
    NSString *transcript = KBVoiceStringForKey(json, @"transcript");
    NSString *aiResponse = KBVoiceStringForKey(json, @"ai_response");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidCompleteWithTranscript:
                                                        aiResponse:)]) {
        [self.delegate voiceChatClientDidCompleteWithTranscript:transcript
                                                     aiResponse:aiResponse];
      }
    });
  } else if ([type isEqualToString:@"error"]) {
    NSString *code = KBVoiceStringForKey(json, @"code");
    NSString *message = KBVoiceStringForKey(json, @"message");
    dispatch_async(dispatch_get_main_queue(), ^{
      if ([self.delegate respondsToSelector:@selector
                         (voiceChatClientDidReceiveErrorCode:message:)]) {
        [self.delegate voiceChatClientDidReceiveErrorCode:code
                                                  message:message];
      }
    });
  }
}
|
||||
|
||||
/// Forwards one binary audio chunk to the delegate on the main queue.
- (void)handleBinaryMessage:(NSData *)data {
  if (data.length == 0) {
    return;
  }

  dispatch_async(dispatch_get_main_queue(), ^{
    if (![self.delegate respondsToSelector:@selector
                        (voiceChatClientDidReceiveAudioChunk:)]) {
      return;
    }
    [self.delegate voiceChatClientDidReceiveAudioChunk:data];
  });
}
|
||||
|
||||
/// Tears down the socket task and URL session and resets connection state.
/// Safe to call repeatedly: messaging the nil'd-out references is a no-op.
- (void)disconnectInternal {
  self.connected = NO;
  self.sessionId = nil;
  self.audioSendingEnabled = NO;

  NSURLSessionWebSocketTask *task = self.webSocketTask;
  self.webSocketTask = nil;
  [task cancelWithCloseCode:NSURLSessionWebSocketCloseCodeNormalClosure
                     reason:nil];

  NSURLSession *session = self.urlSession;
  self.urlSession = nil;
  [session invalidateAndCancel];
}
|
||||
|
||||
/// Delivers `error` to the delegate on the main queue.
- (void)reportError:(NSError *)error {
  dispatch_async(dispatch_get_main_queue(), ^{
    if (![self.delegate respondsToSelector:@selector(voiceChatClientDidFail:)]) {
      return;
    }
    [self.delegate voiceChatClientDidFail:error];
  });
}
|
||||
|
||||
/// Wraps a plain message in an NSError (client error domain, code -1) and
/// funnels it through -reportError:.
- (void)reportErrorWithMessage:(NSString *)message {
  NSDictionary *userInfo = @{NSLocalizedDescriptionKey : message ?: @""};
  NSError *error =
      [NSError errorWithDomain:kVoiceChatWebSocketClientErrorDomain
                          code:-1
                      userInfo:userInfo];
  [self reportError:error];
}
|
||||
|
||||
/// Marks the client disconnected and notifies the delegate on the main
/// queue. `error` is nil for a clean close.
- (void)notifyDisconnect:(NSError *_Nullable)error {
  self.connected = NO;

  dispatch_async(dispatch_get_main_queue(), ^{
    if (![self.delegate respondsToSelector:@selector
                        (voiceChatClientDidDisconnect:)]) {
      return;
    }
    [self.delegate voiceChatClientDidDisconnect:error];
  });
}
|
||||
|
||||
#pragma mark - NSURLSessionWebSocketDelegate
|
||||
|
||||
/// NSURLSessionWebSocketDelegate — the handshake completed: mark the client
/// connected and inform the delegate on the main queue.
- (void)URLSession:(NSURLSession *)session
          webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
    didOpenWithProtocol:(NSString *)protocol {
  self.connected = YES;
  NSLog(@"[VoiceChatWebSocketClient] Connected");
  dispatch_async(dispatch_get_main_queue(), ^{
    if (![self.delegate
            respondsToSelector:@selector(voiceChatClientDidConnect)]) {
      return;
    }
    [self.delegate voiceChatClientDidConnect];
  });
}
|
||||
|
||||
/// NSURLSessionWebSocketDelegate — the peer (or the stack) closed the
/// socket. Ignored when we already tore the task down ourselves, since
/// disconnectInternal nils webSocketTask first.
- (void)URLSession:(NSURLSession *)session
       webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask
    didCloseWithCode:(NSURLSessionWebSocketCloseCode)closeCode
              reason:(NSData *)reason {
  if (!self.webSocketTask) {
    return;
  }
  NSLog(@"[VoiceChatWebSocketClient] Closed with code: %ld", (long)closeCode);
  [self notifyDisconnect:nil];
  [self disconnectInternal];
}
|
||||
|
||||
@end
|
||||
@@ -124,6 +124,13 @@
|
||||
for (NSInteger i = 0; i < self.buttons.count; i++) {
|
||||
UIButton *button = self.buttons[i];
|
||||
button.selected = (i == self.selectedIndex);
|
||||
|
||||
// 特殊处理:社区按钮(索引2)选中时文字颜色为白色
|
||||
if (i == 2 && button.selected) {
|
||||
[button setTitleColor:[UIColor whiteColor] forState:UIControlStateSelected];
|
||||
} else if (i == 2) {
|
||||
[button setTitleColor:[UIColor grayColor] forState:UIControlStateNormal];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
#import <UIKit/UIKit.h>
#import "BaseViewController.h"

NS_ASSUME_NONNULL_BEGIN

/// Debug screen that repeatedly shows/hides the keyboard to stress-test the
/// custom keyboard extension's setup/teardown path.
@interface KBKeyboardStressTestVC : BaseViewController

@end

NS_ASSUME_NONNULL_END
|
||||
142
keyBoard/Class/Home/VC/FunctionTest/KBKeyboardStressTestVC.m
Normal file
142
keyBoard/Class/Home/VC/FunctionTest/KBKeyboardStressTestVC.m
Normal file
@@ -0,0 +1,142 @@
|
||||
#import "KBKeyboardStressTestVC.h"
|
||||
|
||||
// Private state for the keyboard stress-test screen.
@interface KBKeyboardStressTestVC ()
@property(nonatomic, strong) UITextView *textView;    // first responder toggled each cycle
@property(nonatomic, strong) UIButton *startButton;   // starts a 200-cycle run
@property(nonatomic, strong) UIButton *stopButton;    // aborts the current run
@property(nonatomic, strong) UILabel *statusLabel;    // "state | progress" readout
@property(nonatomic, assign) NSInteger currentCycle;  // 1-based counter of the cycle in flight
@property(nonatomic, assign) NSInteger totalCycles;   // cycle budget for the active run
@property(nonatomic, assign) BOOL running;            // YES while a run is in progress
@end
|
||||
|
||||
@implementation KBKeyboardStressTestVC
|
||||
|
||||
/// Configures the screen and builds the fixed-frame UI; defaults the run to
/// 200 cycles.
- (void)viewDidLoad {
  [super viewDidLoad];
  self.view.backgroundColor = [UIColor whiteColor];
  self.title = @"键盘压力测试";
  self.totalCycles = 200;
  [self buildUI];
  [self updateStatus];
}
|
||||
|
||||
/// Aborts any in-flight run when the screen goes away, so pending cycle
/// blocks stop toggling the keyboard off-screen.
- (void)viewWillDisappear:(BOOL)animated {
  [super viewWillDisappear:animated];
  [self stop];
}
|
||||
|
||||
/// Lays out the status label, explanatory text view, and the start / stop /
/// run-ten buttons with fixed frames.
- (void)buildUI {
  CGFloat width = UIScreen.mainScreen.bounds.size.width;

  self.statusLabel = [[UILabel alloc]
      initWithFrame:CGRectMake(16, KB_NAV_TOTAL_HEIGHT + 16, width - 32, 22)];
  self.statusLabel.font = [UIFont systemFontOfSize:13];
  self.statusLabel.textColor = [UIColor colorWithWhite:0.2 alpha:1];
  [self.view addSubview:self.statusLabel];

  self.textView = [[UITextView alloc]
      initWithFrame:CGRectMake(16, CGRectGetMaxY(self.statusLabel.frame) + 12,
                               width - 32, 160)];
  self.textView.text = @"把系统输入法切到自定义键盘后,点击开始,会反复显示/隐藏键盘。";
  self.textView.layer.borderColor =
      [UIColor colorWithWhite:0 alpha:0.15].CGColor;
  self.textView.layer.borderWidth = 0.5;
  self.textView.layer.cornerRadius = 8;
  [self.view addSubview:self.textView];

  CGFloat buttonWidth = (width - 16 * 2 - 12) / 2.0;
  CGFloat buttonY = CGRectGetMaxY(self.textView.frame) + 16;

  // Primary (blue) start button.
  self.startButton = [UIButton buttonWithType:UIButtonTypeSystem];
  self.startButton.frame = CGRectMake(16, buttonY, buttonWidth, 44);
  self.startButton.layer.cornerRadius = 10;
  self.startButton.backgroundColor =
      [UIColor colorWithRed:0.22 green:0.49 blue:0.96 alpha:1];
  [self.startButton setTitleColor:UIColor.whiteColor
                         forState:UIControlStateNormal];
  [self.startButton setTitle:@"开始" forState:UIControlStateNormal];
  [self.startButton addTarget:self
                       action:@selector(onStart)
             forControlEvents:UIControlEventTouchUpInside];
  [self.view addSubview:self.startButton];

  // The two gray buttons share styling; build them through one helper.
  self.stopButton = [self
      kb_secondaryButtonWithTitle:@"停止"
                           action:@selector(onStop)
                            frame:CGRectMake(
                                      CGRectGetMaxX(self.startButton.frame) + 12,
                                      buttonY, buttonWidth, 44)];
  [self.view addSubview:self.stopButton];

  UIButton *runTenButton = [self
      kb_secondaryButtonWithTitle:@"执行 10 次"
                           action:@selector(onRunTen)
                            frame:CGRectMake(
                                      16,
                                      CGRectGetMaxY(self.startButton.frame) + 12,
                                      width - 32, 44)];
  [self.view addSubview:runTenButton];
}

// Builds one gray "secondary" button with the shared corner/color styling.
- (UIButton *)kb_secondaryButtonWithTitle:(NSString *)title
                                   action:(SEL)action
                                    frame:(CGRect)frame {
  UIButton *button = [UIButton buttonWithType:UIButtonTypeSystem];
  button.frame = frame;
  button.layer.cornerRadius = 10;
  button.backgroundColor = [UIColor colorWithWhite:0.2 alpha:0.08];
  [button setTitleColor:[UIColor colorWithWhite:0.15 alpha:1]
               forState:UIControlStateNormal];
  [button setTitle:title forState:UIControlStateNormal];
  [button addTarget:self
                action:action
      forControlEvents:UIControlEventTouchUpInside];
  return button;
}
|
||||
|
||||
/// "开始" tap: run the full 200-cycle stress pass.
- (void)onStart {
  self.totalCycles = 200;
  [self start];
}
|
||||
|
||||
/// "执行 10 次" tap: run a short 10-cycle pass.
- (void)onRunTen {
  self.totalCycles = 10;
  [self start];
}
|
||||
|
||||
/// "停止" tap: abort the current run.
- (void)onStop {
  [self stop];
}
|
||||
|
||||
/// Kicks off a run unless one is already in progress; resets the cycle
/// counter and starts the first cycle.
- (void)start {
  if (self.running) {
    return;
  }

  self.running = YES;
  self.currentCycle = 0;
  [self updateStatus];
  [self runNextCycle];
}
|
||||
|
||||
/// Ends the run (pending cycle blocks bail out on `running`), dismisses the
/// keyboard, and refreshes the UI.
- (void)stop {
  self.running = NO;
  [self.textView resignFirstResponder];
  [self updateStatus];
}
|
||||
|
||||
/// Runs one show/hide cycle and schedules the next one:
/// becomeFirstResponder → 0.18 s → resignFirstResponder → 0.12 s → recurse.
/// Bails out when `running` is cleared or the cycle budget is exhausted.
- (void)runNextCycle {
  if (!self.running) {
    return;
  }
  if (self.currentCycle >= self.totalCycles) {
    [self stop];
    return;
  }

  self.currentCycle += 1;
  [self updateStatus];

  __weak typeof(self) weakSelf = self;
  [self.textView becomeFirstResponder];
  dispatch_time_t hideAt =
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.18 * NSEC_PER_SEC));
  dispatch_after(hideAt, dispatch_get_main_queue(), ^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf || !strongSelf.running) {
      return;
    }
    [strongSelf.textView resignFirstResponder];
    dispatch_time_t nextAt =
        dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.12 * NSEC_PER_SEC));
    dispatch_after(nextAt, dispatch_get_main_queue(), ^{
      // Messaging nil is a no-op, so no strongification is needed here.
      [weakSelf runNextCycle];
    });
  });
}
|
||||
|
||||
/// Refreshes the status line and the enabled/alpha state of both buttons
/// from `running` and the cycle counters.
- (void)updateStatus {
  BOOL isRunning = self.running;
  NSString *stateText = isRunning ? @"运行中" : @"未运行";
  self.statusLabel.text =
      [NSString stringWithFormat:@"状态:%@ | 进度:%ld/%ld", stateText,
                                 (long)self.currentCycle,
                                 (long)self.totalCycles];
  self.startButton.enabled = !isRunning;
  self.startButton.alpha = isRunning ? 0.5 : 1.0;
  self.stopButton.enabled = isRunning;
  self.stopButton.alpha = isRunning ? 1.0 : 0.5;
}
|
||||
|
||||
@end
|
||||
@@ -96,7 +96,7 @@ static NSString * const kKBSvipBenefitHeaderId = @"kKBSvipBenefitHeaderId";
|
||||
// 过滤 level=2 的 SVIP 数据
|
||||
NSMutableArray<KBPayProductModel *> *svipProducts = [NSMutableArray array];
|
||||
for (KBPayProductModel *product in products) {
|
||||
if (product.level == 1) {
|
||||
if (product.level == 2) {
|
||||
[svipProducts addObject:product];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -135,7 +135,13 @@ static NSString * const kKBVipReviewListCellId = @"kKBVipReviewListCellId";
|
||||
if (tip.length) { [KBHUD showInfo:tip]; }
|
||||
return;
|
||||
}
|
||||
self.plans = products ?: @[];
|
||||
NSMutableArray<KBPayProductModel *> *vipProducts = [NSMutableArray array];
|
||||
for (KBPayProductModel *product in products) {
|
||||
if (product.level == 1) {
|
||||
[vipProducts addObject:product];
|
||||
}
|
||||
}
|
||||
self.plans = vipProducts ?: @[];
|
||||
self.selectedIndex = self.plans.count > 0 ? 0 : NSNotFound;
|
||||
[self.collectionView reloadData];
|
||||
[self prepareStoreKitWithPlans:self.plans];
|
||||
|
||||
Reference in New Issue
Block a user