Merge branch 'feature_0901'

# Conflicts:
#	apps/iOS/MNNLLMChat/MNNLLMiOS/Chat/Views/LLMChatView.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/ChatHistory/Services/ChatHistoryDatabase.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/MainTab/MainTabView.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/Service/ModelDownloader/ModelClient.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/Service/ModelDownloader/ModelDownloadConfiguration.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/Service/ModelDownloader/ModelDownloadManager.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/Service/ModelDownloader/ModelScopeDownloadManager.swift
#	apps/iOS/MNNLLMChat/MNNLLMiOS/Service/Util/String+Extension.swift
Author: 游薪渝(揽清)
Date:   2025-09-05 15:41:53 +08:00
Commit: eda46aca14
45 changed files with 982 additions and 431 deletions

View File

@@ -8,9 +8,10 @@
 /* Begin PBXBuildFile section */
 	3E301C692D5C84730045E5E1 /* MNN.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E301C682D5C84730045E5E1 /* MNN.framework */; };
+	3E6301D42E6A8A470004EC63 /* ExyteChat in Frameworks */ = {isa = PBXBuildFile; productRef = 3E6301D32E6A8A470004EC63 /* ExyteChat */; };
 	3E94DFF12D37DBA900BE39A7 /* SQLite in Frameworks */ = {isa = PBXBuildFile; productRef = 3E94DFF02D37DBA900BE39A7 /* SQLite */; };
-	3EC8DF852E4DCF5600861131 /* ExyteChat in Frameworks */ = {isa = PBXBuildFile; productRef = 3EC8DF842E4DCF5600861131 /* ExyteChat */; };
 	3EC8DF872E4DCF5800861131 /* Transformers in Frameworks */ = {isa = PBXBuildFile; productRef = 3EC8DF862E4DCF5800861131 /* Transformers */; };
+	3EDA70222E605BE700B17E48 /* ExyteChat in Frameworks */ = {isa = PBXBuildFile; productRef = 3EDA70212E605BE700B17E48 /* ExyteChat */; };
 /* End PBXBuildFile section */
 /* Begin PBXContainerItemProxy section */
@@ -84,10 +85,11 @@
 	isa = PBXFrameworksBuildPhase;
 	buildActionMask = 2147483647;
 	files = (
+		3EDA70222E605BE700B17E48 /* ExyteChat in Frameworks */,
 		3E301C692D5C84730045E5E1 /* MNN.framework in Frameworks */,
+		3E6301D42E6A8A470004EC63 /* ExyteChat in Frameworks */,
 		3E94DFF12D37DBA900BE39A7 /* SQLite in Frameworks */,
 		3EC8DF872E4DCF5800861131 /* Transformers in Frameworks */,
-		3EC8DF852E4DCF5600861131 /* ExyteChat in Frameworks */,
 	);
 	runOnlyForDeploymentPostprocessing = 0;
 };
@@ -158,8 +160,9 @@
 	name = MNNLLMiOS;
 	packageProductDependencies = (
 		3E94DFF02D37DBA900BE39A7 /* SQLite */,
-		3EC8DF842E4DCF5600861131 /* ExyteChat */,
 		3EC8DF862E4DCF5800861131 /* Transformers */,
+		3EDA70212E605BE700B17E48 /* ExyteChat */,
+		3E6301D32E6A8A470004EC63 /* ExyteChat */,
 	);
 	productName = MNNLLMiOS;
 	productReference = 3E8591FE2D1D45070067B46F /* MNNLLMiOS.app */;
@@ -246,8 +249,8 @@
 	minimizedProjectReferenceProxies = 1;
 	packageReferences = (
 		3E94DF8A2D37D8FF00BE39A7 /* XCRemoteSwiftPackageReference "SQLite" */,
-		3EC8DF822E4DCDF500861131 /* XCRemoteSwiftPackageReference "Chat" */,
 		3EC8DF832E4DCF4A00861131 /* XCRemoteSwiftPackageReference "swift-transformers" */,
+		3E6301D22E6A8A470004EC63 /* XCRemoteSwiftPackageReference "Chat" */,
 	);
 	preferredProjectObjectVersion = 77;
 	productRefGroup = 3E8591FF2D1D45070067B46F /* Products */;
@@ -357,7 +360,7 @@
 	CLANG_WARN_UNREACHABLE_CODE = YES;
 	CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
 	COPY_PHASE_STRIP = NO;
-	DEBUG_INFORMATION_FORMAT = dwarf;
+	DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
 	ENABLE_STRICT_OBJC_MSGSEND = YES;
 	ENABLE_TESTABILITY = YES;
 	ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -451,6 +454,7 @@
 	ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
 	ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
 	CODE_SIGN_ENTITLEMENTS = MNNLLMiOS/MNNLLMiOS.entitlements;
+	CODE_SIGN_IDENTITY = "Apple Development";
 	CODE_SIGN_STYLE = Automatic;
 	CURRENT_PROJECT_VERSION = 1;
 	DEVELOPMENT_ASSET_PATHS = "\"MNNLLMiOS/Preview Content\"";
@@ -465,7 +469,7 @@
 	GENERATE_INFOPLIST_FILE = YES;
 	INFOPLIST_FILE = MNNLLMiOS/Info.plist;
 	INFOPLIST_KEY_CFBundleDisplayName = "MNN Chat";
-	INFOPLIST_KEY_LSApplicationCategoryType = "";
+	INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
 	INFOPLIST_KEY_NSCameraUsageDescription = "We need access your camera to capture image or video";
 	INFOPLIST_KEY_NSDocumentsFolderUsageDescription = "This app needs to access your documents to save downloaded models";
 	INFOPLIST_KEY_NSLocalNetworkUsageDescription = "This app needs to access your local network to download models";
@ -479,8 +483,7 @@
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES; "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault; "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 17; IPHONEOS_DEPLOYMENT_TARGET = 17;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
@@ -488,6 +491,7 @@
 	MARKETING_VERSION = 1.0;
 	PRODUCT_BUNDLE_IDENTIFIER = com.taobao.mnnchat;
 	PRODUCT_NAME = "$(TARGET_NAME)";
+	PROVISIONING_PROFILE_SPECIFIER = "";
 	SDKROOT = auto;
 	SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
 	SUPPORTS_MACCATALYST = NO;
@@ -506,6 +510,7 @@
 	ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
 	ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
 	CODE_SIGN_ENTITLEMENTS = MNNLLMiOS/MNNLLMiOS.entitlements;
+	CODE_SIGN_IDENTITY = "Apple Development";
 	CODE_SIGN_STYLE = Automatic;
 	CURRENT_PROJECT_VERSION = 1;
 	DEVELOPMENT_ASSET_PATHS = "\"MNNLLMiOS/Preview Content\"";
@ -520,7 +525,7 @@
GENERATE_INFOPLIST_FILE = YES; GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = MNNLLMiOS/Info.plist; INFOPLIST_FILE = MNNLLMiOS/Info.plist;
INFOPLIST_KEY_CFBundleDisplayName = "MNN Chat"; INFOPLIST_KEY_CFBundleDisplayName = "MNN Chat";
INFOPLIST_KEY_LSApplicationCategoryType = ""; INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSCameraUsageDescription = "We need access your camera to capture image or video"; INFOPLIST_KEY_NSCameraUsageDescription = "We need access your camera to capture image or video";
INFOPLIST_KEY_NSDocumentsFolderUsageDescription = "This app needs to access your documents to save downloaded models"; INFOPLIST_KEY_NSDocumentsFolderUsageDescription = "This app needs to access your documents to save downloaded models";
INFOPLIST_KEY_NSLocalNetworkUsageDescription = "This app needs to access your local network to download models"; INFOPLIST_KEY_NSLocalNetworkUsageDescription = "This app needs to access your local network to download models";
@@ -534,15 +539,15 @@
 	"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
 	"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
 	"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
-	INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-	INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+	INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
 	IPHONEOS_DEPLOYMENT_TARGET = 17;
 	LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
 	"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
 	MACOSX_DEPLOYMENT_TARGET = 14;
-	MARKETING_VERSION = 1.0;
 	PRODUCT_BUNDLE_IDENTIFIER = com.taobao.mnnchat;
+	MARKETING_VERSION = 1.0.1;
 	PRODUCT_NAME = "$(TARGET_NAME)";
+	PROVISIONING_PROFILE_SPECIFIER = "";
 	SDKROOT = auto;
 	SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
 	SUPPORTS_MACCATALYST = NO;
@@ -562,6 +567,7 @@
 	CURRENT_PROJECT_VERSION = 1;
 	DEVELOPMENT_TEAM = 6QW92DF7RL;
 	GENERATE_INFOPLIST_FILE = YES;
+	INFOPLIST_KEY_NSCameraUsageDescription = "";
 	IPHONEOS_DEPLOYMENT_TARGET = 18.2;
 	MACOSX_DEPLOYMENT_TARGET = 15.2;
 	MARKETING_VERSION = 1.0;
@@ -686,6 +692,14 @@
 /* End XCConfigurationList section */
 /* Begin XCRemoteSwiftPackageReference section */
+	3E6301D22E6A8A470004EC63 /* XCRemoteSwiftPackageReference "Chat" */ = {
+		isa = XCRemoteSwiftPackageReference;
+		repositoryURL = "https://github.com/Yogayu/Chat.git";
+		requirement = {
+			kind = upToNextMajorVersion;
+			minimumVersion = 1.0.1;
+		};
+	};
 	3E94DF8A2D37D8FF00BE39A7 /* XCRemoteSwiftPackageReference "SQLite" */ = {
 		isa = XCRemoteSwiftPackageReference;
 		repositoryURL = "https://github.com/stephencelis/SQLite.swift";
@@ -694,14 +708,6 @@
 			minimumVersion = 0.15.3;
 		};
 	};
-	3EC8DF822E4DCDF500861131 /* XCRemoteSwiftPackageReference "Chat" */ = {
-		isa = XCRemoteSwiftPackageReference;
-		repositoryURL = "https://github.com/Yogayu/Chat.git";
-		requirement = {
-			branch = main;
-			kind = branch;
-		};
-	};
 	3EC8DF832E4DCF4A00861131 /* XCRemoteSwiftPackageReference "swift-transformers" */ = {
 		isa = XCRemoteSwiftPackageReference;
 		repositoryURL = "https://github.com/huggingface/swift-transformers/";
@@ -713,21 +719,25 @@
 /* End XCRemoteSwiftPackageReference section */
 /* Begin XCSwiftPackageProductDependency section */
+	3E6301D32E6A8A470004EC63 /* ExyteChat */ = {
+		isa = XCSwiftPackageProductDependency;
+		package = 3E6301D22E6A8A470004EC63 /* XCRemoteSwiftPackageReference "Chat" */;
+		productName = ExyteChat;
+	};
 	3E94DFF02D37DBA900BE39A7 /* SQLite */ = {
 		isa = XCSwiftPackageProductDependency;
 		package = 3E94DF8A2D37D8FF00BE39A7 /* XCRemoteSwiftPackageReference "SQLite" */;
 		productName = SQLite;
 	};
-	3EC8DF842E4DCF5600861131 /* ExyteChat */ = {
-		isa = XCSwiftPackageProductDependency;
-		package = 3EC8DF822E4DCDF500861131 /* XCRemoteSwiftPackageReference "Chat" */;
-		productName = ExyteChat;
-	};
 	3EC8DF862E4DCF5800861131 /* Transformers */ = {
 		isa = XCSwiftPackageProductDependency;
 		package = 3EC8DF832E4DCF4A00861131 /* XCRemoteSwiftPackageReference "swift-transformers" */;
 		productName = Transformers;
 	};
+	3EDA70212E605BE700B17E48 /* ExyteChat */ = {
+		isa = XCSwiftPackageProductDependency;
+		productName = ExyteChat;
+	};
 /* End XCSwiftPackageProductDependency section */
 };
 rootObject = 3E8591F62D1D45070067B46F /* Project object */;
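Note: the project now pins the Yogayu/Chat fork with an up-to-next-major requirement starting at 1.0.1, instead of tracking the main branch. For readers who think in SwiftPM manifests, a roughly equivalent declaration is sketched below; it is illustrative only (the app resolves this package through the Xcode project, not a Package.swift, and the package name here is a placeholder).

// swift-tools-version:5.9
// Hypothetical Package.swift fragment mirroring the new pbxproj requirement
// (kind = upToNextMajorVersion; minimumVersion = 1.0.1).
import PackageDescription

let package = Package(
    name: "MNNLLMiOSDependencies",  // placeholder, not part of the repo
    dependencies: [
        // from: "1.0.1" means 1.0.1 ..< 2.0.0, matching upToNextMajorVersion in Xcode
        .package(url: "https://github.com/Yogayu/Chat.git", from: "1.0.1")
    ],
    targets: []
)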

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "benchmark.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "benchmarkFill.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "home.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "homeFill.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "market.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "marketFill.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -8,7 +8,6 @@
 import UIKit
 import ExyteChat
-import ExyteMediaPicker
 final class LLMChatData {
     var assistant: LLMChatUser

View File

@@ -6,7 +6,6 @@
 //
 import ExyteChat
-import ExyteMediaPicker
 actor LLMState {
     private var isProcessing: Bool = false

View File

@@ -9,7 +9,6 @@ import SwiftUI
 import AVFoundation
 import ExyteChat
-import ExyteMediaPicker
 final class LLMChatViewModel: ObservableObject {
@@ -48,12 +47,12 @@ final class LLMChatViewModel: ObservableObject {
     }
     var chatCover: URL? {
-        interactor.otherSenders.count == 1 ? interactor.otherSenders.first!.avatar : nil
+        interactor.otherSenders.count == 1 ? interactor.otherSenders.first?.avatar : nil
     }
     private let interactor: LLMChatInteractor
     private var subscriptions = Set<AnyCancellable>()
     var modelInfo: ModelInfo
     var history: ChatHistory?
     private var historyId: String
@@ -65,6 +64,7 @@ final class LLMChatViewModel: ObservableObject {
     }
     init(modelInfo: ModelInfo, history: ChatHistory? = nil) {
+        print("yxy:: LLMChat View Model init")
         self.modelInfo = modelInfo
         self.history = history
         self.historyId = history?.id ?? UUID().uuidString
@@ -77,7 +77,6 @@ final class LLMChatViewModel: ObservableObject {
         // Check if model supports thinking mode
         self.supportsThinkingMode = ModelUtils.isSupportThinkingSwitch(modelInfo.tags, modelName: modelInfo.modelName)
     }
     deinit {
@@ -104,6 +103,7 @@
     func setupLLM(modelPath: String) {
         Task { @MainActor in
+            self.isModelLoaded = false
             self.send(draft: DraftMessage(
                 text: NSLocalizedString("ModelLoadingText", comment: ""),
                 thinkText: "",
@@ -113,21 +113,21 @@
                 createdAt: Date()
             ), userType: .system)
         }
         if modelInfo.modelName.lowercased().contains("diffusion") {
             diffusion = DiffusionSession(modelPath: modelPath, completion: { [weak self] success in
                 Task { @MainActor in
                     print("Diffusion Model \(success)")
+                    self?.isModelLoaded = success
                     self?.sendModelLoadStatus(success: success)
-                    self?.isModelLoaded = success
                 }
             })
         } else {
             llm = LLMInferenceEngineWrapper(modelPath: modelPath) { [weak self] success in
                 Task { @MainActor in
+                    self?.isModelLoaded = success
                     self?.sendModelLoadStatus(success: success)
                     self?.processHistoryMessages()
-                    self?.isModelLoaded = success
                     // Configure thinking mode after model is loaded
                     if success {
@ -155,7 +155,7 @@ final class LLMChatViewModel: ObservableObject {
let modelLoadSuccessText = NSLocalizedString("ModelLoadingSuccessText", comment: "") let modelLoadSuccessText = NSLocalizedString("ModelLoadingSuccessText", comment: "")
let modelLoadFailText = NSLocalizedString("ModelLoadingFailText", comment: "") let modelLoadFailText = NSLocalizedString("ModelLoadingFailText", comment: "")
let loadResult = success ? modelLoadSuccessText : modelLoadFailText let loadResult = success ? modelLoadSuccessText : modelLoadFailText
self.send(draft: DraftMessage( self.send(draft: DraftMessage(
text: loadResult, text: loadResult,
thinkText: "", thinkText: "",
@@ -196,6 +196,9 @@
         NotificationCenter.default.post(name: .dismissKeyboard, object: nil)
         self.send(draft: draft, userType: .user)
+        recordModelUsage()
         if isModelLoaded {
             if modelInfo.modelName.lowercased().contains("diffusion") {
                 self.getDiffusionResponse(draft: draft)
@ -214,7 +217,7 @@ final class LLMChatViewModel: ObservableObject {
Task { Task {
let tempImagePath = FileOperationManager.shared.generateTempImagePath().path let tempImagePath = FileOperationManager.shared.generateTempImagePath().path
var lastProcess:Int32 = 0 var lastProcess:Int32 = 0
self.send(draft: DraftMessage(text: "Start Generating Image...", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .assistant) self.send(draft: DraftMessage(text: "Start Generating Image...", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .assistant)
@@ -223,16 +226,15 @@
             let userIterations = self.modelConfigManager.readIterations()
             let userSeed = self.modelConfigManager.readSeed()
-            // 使
             diffusion?.run(withPrompt: draft.text,
                            imagePath: tempImagePath,
                            iterations: Int32(userIterations),
                            seed: Int32(userSeed),
                            progressCallback: { [weak self] progress in
                 guard let self = self else { return }
                 if progress == 100 {
                     self.send(draft: DraftMessage(text: "Image generated successfully!", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .system)
-                    self.interactor.sendImage(imageURL: URL(string: "file://" + tempImagePath)!)
+                    self.interactor.sendImage(imageURL: URL(fileURLWithPath: tempImagePath))
                 } else if ((progress - lastProcess) > 20) {
                     lastProcess = progress
                     self.send(draft: DraftMessage(text: "Generating Image \(progress)%", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .system)
@ -244,7 +246,7 @@ final class LLMChatViewModel: ObservableObject {
func getLLMRespsonse(draft: DraftMessage) { func getLLMRespsonse(draft: DraftMessage) {
Task { Task {
await llmState.setProcessing(true) await llmState.setProcessing(true)
await MainActor.run { await MainActor.run {
self.isProcessing = true self.isProcessing = true
let emptyMessage = DraftMessage( let emptyMessage = DraftMessage(
text: "", text: "",
@ -268,7 +270,7 @@ final class LLMChatViewModel: ObservableObject {
guard media.type == .image, let url = await media.getURL() else { guard media.type == .image, let url = await media.getURL() else {
continue continue
} }
let fileName = url.lastPathComponent let fileName = url.lastPathComponent
if let processedUrl = FileOperationManager.shared.processImageFile(from: url, fileName: fileName) { if let processedUrl = FileOperationManager.shared.processImageFile(from: url, fileName: fileName) {
@ -277,9 +279,9 @@ final class LLMChatViewModel: ObservableObject {
} }
if let audio = draft.recording, let path = audio.url { if let audio = draft.recording, let path = audio.url {
// if let wavFile = await convertACCToWAV(accFileUrl: path) { // if let wavFile = await convertACCToWAV(accFileUrl: path) {
content = "<audio>\(path.path)</audio>" + content content = "<audio>\(path.path)</audio>" + content
// } // }
} }
let convertedContent = self.convertDeepSeekMutliChat(content: content) let convertedContent = self.convertDeepSeekMutliChat(content: content)
@ -317,7 +319,7 @@ final class LLMChatViewModel: ObservableObject {
return return
} }
Task { Task {
await UIUpdateOptimizer.shared.addUpdate(output) { [weak self] output in await UIUpdateOptimizer.shared.addUpdate(output) { [weak self] output in
guard let self = self else { return } guard let self = self else { return }
self.send(draft: DraftMessage( self.send(draft: DraftMessage(
@@ -390,16 +392,20 @@
         interactor.connect()
         self.setupLLM(modelPath: self.modelInfo.localPath)
+        recordModelUsage()
     }
     func onStop() {
+        recordModelUsage()
         ChatHistoryManager.shared.saveChat(
             historyId: historyId,
             modelInfo: modelInfo,
             messages: messages
         )
         subscriptions.removeAll()
         interactor.disconnect()
@@ -413,10 +419,20 @@
             FileOperationManager.shared.cleanModelTempFolder(modelPath: modelInfo.localPath)
         }
     }
     func loadMoreMessage(before message: Message) {
         interactor.loadNextPage()
             .sink { _ in }
             .store(in: &subscriptions)
     }
+    private func recordModelUsage() {
+        ModelStorageManager.shared.updateLastUsed(for: modelInfo.modelName)
+        NotificationCenter.default.post(
+            name: .modelUsageUpdated,
+            object: nil,
+            userInfo: ["modelName": modelInfo.modelName]
+        )
+    }
 }
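Note: the new recordModelUsage() helper updates ModelStorageManager and broadcasts a modelUsageUpdated notification carrying the model name. A minimal sketch of how another component could observe that notification follows; the observer class is a hypothetical illustration, and the Notification.Name extension is re-declared here only so the sketch compiles standalone (the app presumably already defines it).

import Foundation

// Assumed to exist elsewhere in the app; repeated so this sketch is self-contained.
extension Notification.Name {
    static let modelUsageUpdated = Notification.Name("modelUsageUpdated")
}

// Hypothetical observer, e.g. a view model that refreshes a "recently used" list.
final class ModelUsageObserver {
    private var token: NSObjectProtocol?

    init() {
        token = NotificationCenter.default.addObserver(
            forName: .modelUsageUpdated,
            object: nil,
            queue: .main
        ) { notification in
            // The userInfo payload matches what recordModelUsage() posts.
            if let modelName = notification.userInfo?["modelName"] as? String {
                print("Model used recently: \(modelName)")
            }
        }
    }

    deinit {
        if let token { NotificationCenter.default.removeObserver(token) }
    }
}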

View File

@@ -8,6 +8,7 @@
 import Foundation
 import SwiftUI
 import ExyteChat
+import ExyteMediaPicker
 import AVFoundation
 struct LLMChatView: View {
@ -31,103 +32,128 @@ struct LLMChatView: View {
} }
var body: some View { var body: some View {
ChatView(messages: viewModel.messages, chatType: .conversation) { draft in ZStack {
viewModel.sendToLLM(draft: draft) ChatView(messages: viewModel.messages, chatType: .conversation) { draft in
} viewModel.sendToLLM(draft: draft)
.setStreamingMessageProvider {
viewModel.currentStreamingMessageId
}
.setAvailableInput(
self.title.lowercased().contains("vl") ? .textAndMedia :
self.title.lowercased().contains("audio") ? .textAndAudio :
(self.title.isEmpty ? .textOnly : .textOnly)
)
.messageUseMarkdown(true)
.setRecorderSettings(recorderSettings)
.setThinkingMode(
supportsThinkingMode: viewModel.supportsThinkingMode,
isEnabled: viewModel.isThinkingModeEnabled,
onToggle: {
viewModel.toggleThinkingMode()
} }
) .setStreamingMessageProvider {
.chatTheme( viewModel.currentStreamingMessageId
ChatTheme( }
colors: .init( .setAvailableInput(
messageMyBG: .customBlue.opacity(0.2), self.title.lowercased().contains("omni") ? .full:
messageFriendBG: .clear self.title.lowercased().contains("vl") ? .textAndMedia :
), self.title.lowercased().contains("audio") ? .textAndAudio :
images: .init( (self.title.isEmpty ? .textOnly : .textOnly)
attach: Image(systemName: "photo"), )
attachCamera: Image("attachCamera", bundle: .current) .messageUseMarkdown(true)
.setRecorderSettings(recorderSettings)
.setThinkingMode(
supportsThinkingMode: viewModel.supportsThinkingMode,
isEnabled: viewModel.isThinkingModeEnabled,
onToggle: {
viewModel.toggleThinkingMode()
}
)
.setMediaPickerSelectionParameters(
MediaPickerParameters(mediaType: .photo,
selectionLimit: 1,
showFullscreenPreview: false)
)
.chatTheme(
ChatTheme(
colors: .init(
messageMyBG: .customBlue.opacity(0.2),
messageFriendBG: .clear
),
images: .init(
attach: Image(systemName: "photo"),
attachCamera: Image("attachCamera", bundle: .current)
)
) )
) )
) .mediaPickerTheme(
.mediaPickerTheme( main: .init(
main: .init( text: .white,
text: .white, albumSelectionBackground: .customPickerBg,
albumSelectionBackground: .customPickerBg, fullscreenPhotoBackground: .customPickerBg,
fullscreenPhotoBackground: .customPickerBg, cameraBackground: .black,
cameraBackground: .black, cameraSelectionBackground: .black
cameraSelectionBackground: .black ),
), selection: .init(
selection: .init( emptyTint: .white,
emptyTint: .white, emptyBackground: .black.opacity(0.25),
emptyBackground: .black.opacity(0.25), selectedTint: .customBlue,
selectedTint: .customBlue, fullscreenTint: .white
fullscreenTint: .white )
) )
) .navigationBarTitle("")
.navigationBarTitle("") .navigationBarTitleDisplayMode(.inline)
.navigationBarTitleDisplayMode(.inline) .navigationBarBackButtonHidden()
.navigationBarBackButtonHidden() .disabled(viewModel.chatInputUnavilable)
.disabled(viewModel.chatInputUnavilable) .toolbar {
.toolbar { ToolbarItem(placement: .navigationBarLeading) {
ToolbarItem(placement: .navigationBarLeading) { Button {
Button { presentationMode.wrappedValue.dismiss()
presentationMode.wrappedValue.dismiss() } label: {
} label: { Image("backArrow", bundle: .current)
Image("backArrow", bundle: .current) }
} }
}
ToolbarItem(placement: .principal) { ToolbarItem(placement: .principal) {
HStack { HStack {
VStack(alignment: .leading, spacing: 0) { VStack(alignment: .leading, spacing: 0) {
Text(title) Text(title)
.fontWeight(.semibold) .fontWeight(.semibold)
.font(.headline) .font(.headline)
.foregroundColor(.black) .foregroundColor(.black)
Text(viewModel.chatStatus) Text(viewModel.chatStatus)
.font(.footnote) .font(.footnote)
.foregroundColor(Color(hex: "AFB3B8")) .foregroundColor(Color(hex: "AFB3B8"))
}
Spacer()
} }
Spacer() .padding(.leading, 10)
} }
.padding(.leading, 10)
}
ToolbarItem(placement: .navigationBarTrailing) { ToolbarItem(placement: .navigationBarTrailing) {
HStack(spacing: 8) { HStack(spacing: 8) {
// Settings Button // Settings Button
Button(action: { showSettings.toggle() }) { Button(action: { showSettings.toggle() }) {
Image(systemName: "gear") Image(systemName: "gear")
} }
.sheet(isPresented: $showSettings) { .sheet(isPresented: $showSettings) {
ModelSettingsView(showSettings: $showSettings, viewModel: viewModel) ModelSettingsView(showSettings: $showSettings, viewModel: viewModel)
}
} }
} }
} }
} .onAppear {
viewModel.onStart()
.onAppear { }
viewModel.onStart() .onDisappear(perform: viewModel.onStop)
} .onReceive(NotificationCenter.default.publisher(for: .dismissKeyboard)) { _ in
.onDisappear(perform: viewModel.onStop) // Hidden keyboard
.onReceive(NotificationCenter.default.publisher(for: .dismissKeyboard)) { _ in UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil)
// }
UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil)
// Loading overlay
if !viewModel.isModelLoaded {
Color.black.opacity(0.4)
.ignoresSafeArea()
.overlay(
VStack(spacing: 20) {
ProgressView()
.progressViewStyle(CircularProgressViewStyle(tint: .white))
.scaleEffect(1.5)
Text(NSLocalizedString("Model is loading...", comment: ""))
.font(.system(size: 15, weight: .regular))
.foregroundColor(.white)
.font(.headline)
}
)
}
} }
} }

View File

@@ -32,7 +32,9 @@ class ChatHistoryDatabase {
     private let updatedAt: Column<Date>
     private init() throws {
-        let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first!
+        guard let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else {
+            throw NSError(domain: "ChatHistoryDatabase", code: -1, userInfo: [NSLocalizedDescriptionKey: "Documents directory not found"])
+        }
         db = try Connection("\(path)/chatHistory.sqlite3")
         chatHistories = Table("chatHistories")
@@ -174,19 +176,19 @@
             do {
                 let modelInfoData = modelInfoString.data(using: .utf8)!
                 modelInfoObj = try JSONDecoder().decode(ModelInfo.self, from: modelInfoData)
-                print("Successfully decoded ModelInfo from JSON for history: \(history[id])")
+                // print("Successfully decoded ModelInfo from JSON for history: \(history[id])")
             } catch {
-                print("Failed to decode ModelInfo from JSON, using fallback: \(error)")
+                // print("Failed to decode ModelInfo from JSON, using fallback: \(error)")
                 modelInfoObj = ModelInfo(modelId: history[modelId], isDownloaded: true)
             }
         } else {
             // For backward compatibility
-            print("No modelInfo data found, using fallback for history: \(history[id])")
+            // print("No modelInfo data found, using fallback for history: \(history[id])")
             modelInfoObj = ModelInfo(modelId: history[modelId], isDownloaded: true)
         }
     } catch {
         // For backward compatibility
-        print("ModelInfo column not found, using fallback for history: \(history[id])")
+        // print("ModelInfo column not found, using fallback for history: \(history[id])")
         modelInfoObj = ModelInfo(modelId: history[modelId], isDownloaded: true)
     }

View File

@@ -698,10 +698,7 @@ bool remove_directory_safely(const std::string& path) {
         }
         return;
     }
-    // Performance measurement initialization
-    auto inference_start_time = std::chrono::high_resolution_clock::now();
     // Get initial context state BEFORE inference starts
     auto* context = _llm->getContext();
     int initial_prompt_len = 0;
@@ -764,13 +761,6 @@
 #else
     {
 #endif
-        // Get initial context state for performance measurement
-        auto context = blockSelf->_llm->getContext();
-        int initial_prompt_len = context->prompt_len;
-        int initial_decode_len = context->gen_seq_len;
-        int64_t initial_prefill_time = context->prefill_us;
-        int64_t initial_decode_time = context->decode_us;
         // Reset stop flag before starting inference
         blockSelf->_shouldStopInference = false;
@@ -785,6 +775,7 @@
         // Start inference with initial response processing
         blockSelf->_llm->response(blockSelf->_history, &os, "<eop>", 1);
         int current_size = 1;
         int max_new_tokens = 999999;
@@ -798,7 +789,7 @@
             current_size++;
             // Small delay to allow UI updates and stop signal processing
-            std::this_thread::sleep_for(std::chrono::milliseconds(1));
+            // std::this_thread::sleep_for(std::chrono::milliseconds(1));
         }
         // Send appropriate end signal based on stop reason
@@ -835,48 +826,32 @@
             inference_end_time - inference_start_time
         );
-        // Get final context state AFTER inference completes
-        int final_prompt_len = context->prompt_len;
-        int final_decode_len = context->gen_seq_len;
-        int64_t final_prefill_time = context->prefill_us;
-        int64_t final_decode_time = context->decode_us;
-        // Calculate differences for this inference session
-        int current_prompt_len = final_prompt_len - initial_prompt_len;
-        int current_decode_len = final_decode_len - initial_decode_len;
-        int64_t current_prefill_time = final_prefill_time - initial_prefill_time;
-        int64_t current_decode_time = final_decode_time - initial_decode_time;
+        int prompt_len = 0;
+        int decode_len = 0;
+        int64_t prefill_time = 0;
+        int64_t decode_time = 0;
+        prompt_len += context->prompt_len;
+        decode_len += context->gen_seq_len;
+        prefill_time += context->prefill_us;
+        decode_time += context->decode_us;
         // Convert microseconds to seconds
-        float prefill_s = static_cast<float>(current_prefill_time) / 1e6f;
-        float decode_s = static_cast<float>(current_decode_time) / 1e6f;
+        float prefill_s = static_cast<float>(prefill_time) / 1e6f;
+        float decode_s = static_cast<float>(decode_time) / 1e6f;
         // Calculate speeds (tokens per second)
         float prefill_speed = (prefill_s > 0.001f) ?
-            static_cast<float>(current_prompt_len) / prefill_s : 0.0f;
+            static_cast<float>(prompt_len) / prefill_s : 0.0f;
         float decode_speed = (decode_s > 0.001f) ?
-            static_cast<float>(current_decode_len) / decode_s : 0.0f;
+            static_cast<float>(decode_len) / decode_s : 0.0f;
-        // Format performance results with better formatting
+        // Format performance results in 2-line format
         std::ostringstream performance_output;
-        performance_output << "\n\n > Performance Metrics:\n"
-            << "Total inference time: " << total_inference_time.count() << " ms\n"
-            << " Prompt tokens: " << current_prompt_len << "\n"
-            << "Generated tokens: " << current_decode_len << "\n"
-            << "Prefill time: " << std::fixed << std::setprecision(3) << prefill_s << " s\n"
-            << "Decode time: " << std::fixed << std::setprecision(3) << decode_s << " s\n"
-            << "Prefill speed: " << std::fixed << std::setprecision(1) << prefill_speed << " tok/s\n"
-            << "Decode speed: " << std::fixed << std::setprecision(1) << decode_speed << " tok/s\n";
+        performance_output << "\n\nPrefill: " << std::fixed << std::setprecision(2) << prefill_s << "s, "
+            << prompt_len << " tokens, " << std::setprecision(2) << prefill_speed << " tokens/s\n"
+            << "Decode: " << std::fixed << std::setprecision(2) << decode_s << "s, "
+            << decode_len << " tokens, " << std::setprecision(2) << decode_speed << " tokens/s\n";
-        // Add efficiency metrics
-        if (current_prompt_len > 0 && current_decode_len > 0) {
-            float total_tokens = static_cast<float>(current_prompt_len + current_decode_len);
-            float total_time_s = static_cast<float>(total_inference_time.count()) / 1000.0f;
-            float overall_speed = total_time_s > 0.001f ? total_tokens / total_time_s : 0.0f;
-            performance_output << "> Overall speed: " << std::fixed << std::setprecision(1)
-                << overall_speed << " tok/s\n";
-        }
         // Output performance results on main queue
         std::string perf_str = performance_output.str();

View File

@ -69,7 +69,14 @@
} }
}, },
"Audio Message" : { "Audio Message" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "语音信息"
}
}
}
}, },
"Benchmark" : { "Benchmark" : {
"comment" : "基准测试标签", "comment" : "基准测试标签",
@@ -112,7 +119,7 @@
       },
       "zh-Hans" : {
         "stringUnit" : {
-          "state" : "needs_review",
+          "state" : "translated",
          "value" : "清除"
        }
      }
@@ -144,7 +151,7 @@
       },
       "zh-Hans" : {
         "stringUnit" : {
-          "state" : "needs_review",
+          "state" : "translated",
          "value" : "完成"
        }
      }
@@ -218,17 +225,6 @@
        }
      }
    },
-    "Chat" : {
-      "extractionState" : "stale",
-      "localizations" : {
-        "zh-Hans" : {
-          "stringUnit" : {
-            "state" : "translated",
-            "value" : "对话"
-          }
-        }
-      }
-    },
    "ChatHistroyTitle" : {
      "extractionState" : "manual",
      "localizations" : {
@ -283,6 +279,16 @@
} }
} }
}, },
"Complete duration" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "完成时长"
}
}
}
},
"Completed" : { "Completed" : {
"localizations" : { "localizations" : {
"zh-Hans" : { "zh-Hans" : {
@ -303,6 +309,16 @@
} }
} }
}, },
"Decode Speed" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "解码速度"
}
}
}
},
"Delete" : { "Delete" : {
"localizations" : { "localizations" : {
"zh-Hans" : { "zh-Hans" : {
@ -380,7 +396,7 @@
}, },
"zh-Hans" : { "zh-Hans" : {
"stringUnit" : { "stringUnit" : {
"state" : "needs_review", "state" : "translated",
"value" : "按标签筛选" "value" : "按标签筛选"
} }
} }
@ -396,7 +412,7 @@
}, },
"zh-Hans" : { "zh-Hans" : {
"stringUnit" : { "stringUnit" : {
"state" : "needs_review", "state" : "translated",
"value" : "按厂商筛选" "value" : "按厂商筛选"
} }
} }
@ -412,12 +428,22 @@
}, },
"zh-Hans" : { "zh-Hans" : {
"stringUnit" : { "stringUnit" : {
"state" : "needs_review", "state" : "translated",
"value" : "筛选选项" "value" : "筛选选项"
} }
} }
} }
}, },
"Generation rate" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "生成速率"
}
}
}
},
"Help" : { "Help" : {
"localizations" : { "localizations" : {
"zh-Hans" : { "zh-Hans" : {
@@ -490,13 +516,12 @@
        }
      }
    },
-    "Local Model" : {
-      "comment" : "本地模型标签",
+    "Memory Usage" : {
      "localizations" : {
        "zh-Hans" : {
          "stringUnit" : {
            "state" : "translated",
-            "value" : "本地模型"
+            "value" : "内存使用"
          }
        }
      }
@ -510,6 +535,9 @@
} }
} }
} }
},
"Model is loading..." : {
}, },
"Model Market" : { "Model Market" : {
"comment" : "模型市场标签", "comment" : "模型市场标签",
@ -583,12 +611,33 @@
}, },
"zh-Hans" : { "zh-Hans" : {
"stringUnit" : { "stringUnit" : {
"state" : "needs_review", "state" : "translated",
"value" : "下载源" "value" : "下载源"
} }
} }
} }
}, },
"My Model" : {
"comment" : "我的模型标签",
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "我的模型"
}
}
}
},
"N/A" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "不可用"
}
}
}
},
"No" : { "No" : {
"localizations" : { "localizations" : {
"zh-Hans" : { "zh-Hans" : {
@ -629,6 +678,16 @@
} }
} }
}, },
"Peak memory" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "峰值内存"
}
}
}
},
"Penalty Sampler" : { "Penalty Sampler" : {
}, },
@ -655,8 +714,25 @@
} }
} }
}, },
"Prefill Speed" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "预填充速度"
}
}
}
},
"Progress" : { "Progress" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "进展"
}
}
}
}, },
"Random Seed" : { "Random Seed" : {
"localizations" : { "localizations" : {
@ -669,7 +745,14 @@
} }
}, },
"Ready" : { "Ready" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "准备"
}
}
}
}, },
"Running performance tests" : { "Running performance tests" : {
"localizations" : { "localizations" : {
@ -1240,6 +1323,38 @@
} }
} }
}, },
"Tokens per second" : {
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "每秒令牌数"
}
}
}
},
"Total Time" : {
"extractionState" : "manual",
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "总时间"
}
}
}
},
"Total Tokens" : {
"extractionState" : "stale",
"localizations" : {
"zh-Hans" : {
"stringUnit" : {
"state" : "translated",
"value" : "总令牌数"
}
}
}
},
"Use HuggingFace to download" : { "Use HuggingFace to download" : {
"localizations" : { "localizations" : {
"zh-Hans" : { "zh-Hans" : {
@@ -1317,12 +1432,18 @@
        }
      }
    },
-    "搜索本地模型..." : {
+    "搜索模型..." : {
      "localizations" : {
        "en" : {
          "stringUnit" : {
            "state" : "translated",
-            "value" : "Search local models …"
+            "value" : "Search Models…"
          }
+        },
+        "zh-Hans" : {
+          "stringUnit" : {
+            "state" : "translated",
+            "value" : "搜索模型"
+          }
        }
      }
    }

View File

@@ -2,9 +2,13 @@
 <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 <plist version="1.0">
 <dict>
+	<key>com.apple.developer.kernel.extended-virtual-addressing</key>
+	<true/>
+	<key>com.apple.developer.kernel.increased-memory-limit</key>
+	<true/>
 	<key>com.apple.security.app-sandbox</key>
 	<true/>
 	<key>com.apple.security.files.user-selected.read-only</key>
 	<true/>
 </dict>
 </plist>

View File

@@ -19,9 +19,11 @@ class BenchmarkResultsHelper {
     // MARK: - Results Processing & Statistics
     /// Processes test results to generate comprehensive benchmark statistics
-    /// - Parameter testResults: Array of completed test instances
+    /// - Parameters:
+    ///   - testResults: Array of completed test instances
+    ///   - totalTimeSeconds: Total benchmark runtime from start to completion
     /// - Returns: Processed statistics including speed metrics and configuration details
-    func processTestResults(_ testResults: [TestInstance]) -> BenchmarkStatistics {
+    func processTestResults(_ testResults: [TestInstance], totalTimeSeconds: Float = 0.0) -> BenchmarkStatistics {
         guard !testResults.isEmpty else {
             return BenchmarkStatistics.empty
         }
@@ -65,7 +67,8 @@
             prefillStats: prefillStats,
             decodeStats: decodeStats,
             totalTokensProcessed: totalTokensProcessed,
-            totalTests: testResults.count
+            totalTests: testResults.count,
+            totalTimeSeconds: Double(totalTimeSeconds)
         )
     }

View File

@@ -14,11 +14,13 @@ struct BenchmarkResults {
     let maxMemoryKb: Int64
     let testResults: [TestInstance]
     let timestamp: String
+    let totalTimeSeconds: Float
-    init(modelDisplayName: String, maxMemoryKb: Int64, testResults: [TestInstance], timestamp: String) {
+    init(modelDisplayName: String, maxMemoryKb: Int64, testResults: [TestInstance], timestamp: String, totalTimeSeconds: Float = 0.0) {
         self.modelDisplayName = modelDisplayName
         self.maxMemoryKb = maxMemoryKb
         self.testResults = testResults
         self.timestamp = timestamp
+        self.totalTimeSeconds = totalTimeSeconds
     }
 }

View File

@@ -15,12 +15,14 @@ struct BenchmarkStatistics {
     let decodeStats: SpeedStatistics?
     let totalTokensProcessed: Int
     let totalTests: Int
+    let totalTimeSeconds: Double
     static let empty = BenchmarkStatistics(
         configText: "",
         prefillStats: nil,
         decodeStats: nil,
         totalTokensProcessed: 0,
-        totalTests: 0
+        totalTests: 0,
+        totalTimeSeconds: 0.0
     )
 }

View File

@ -43,6 +43,10 @@ class BenchmarkViewModel: ObservableObject {
// Model list manager for getting local models // Model list manager for getting local models
private let modelListManager = ModelListManager.shared private let modelListManager = ModelListManager.shared
// Track total benchmark runtime from start to completion
private var totalBenchmarkTimeSeconds: Float = 0.0
private var benchmarkStartTime: Date?
// MARK: - Initialization & Setup // MARK: - Initialization & Setup
init() { init() {
@@ -78,7 +82,7 @@
         // Filter only downloaded models that are available locally
         availableModels = allModels.filter { model in
-            model.isDownloaded && model.localPath != ""
+            model.isDownloaded && model.localPath != "" && !model.modelName.lowercased().contains("omni")
         }
         print("BenchmarkViewModel: Loaded \(availableModels.count) available local models")
@ -218,6 +222,8 @@ class BenchmarkViewModel: ObservableObject {
startButtonText = String(localized: "Stop Test") startButtonText = String(localized: "Stop Test")
showProgressBar = true showProgressBar = true
showResults = false showResults = false
totalBenchmarkTimeSeconds = 0.0
benchmarkStartTime = Date()
updateStatus("Initializing benchmark...") updateStatus("Initializing benchmark...")
} }
@ -229,6 +235,7 @@ class BenchmarkViewModel: ObservableObject {
showProgressBar = false showProgressBar = false
hideStatus() hideStatus()
showResults = false showResults = false
benchmarkStartTime = nil
cleanupBenchmarkResources() cleanupBenchmarkResources()
} }
@ -309,6 +316,11 @@ extension BenchmarkViewModel: BenchmarkCallback {
let formattedProgress = formatProgressMessage(progress) let formattedProgress = formatProgressMessage(progress)
currentProgress = formattedProgress currentProgress = formattedProgress
updateStatus(formattedProgress.statusMessage) updateStatus(formattedProgress.statusMessage)
// Calculate the total runtime from benchmark start to current point
if let startTime = benchmarkStartTime {
totalBenchmarkTimeSeconds = Float(Date().timeIntervalSince(startTime))
}
} }
/// Handles benchmark completion with results processing /// Handles benchmark completion with results processing
@@ -322,7 +334,8 @@
             modelDisplayName: model.modelName,
             maxMemoryKb: MemoryMonitor.shared.getMaxMemoryKb(),
             testResults: [result.testInstance],
-            timestamp: DateFormatter.benchmarkTimestamp.string(from: Date())
+            timestamp: DateFormatter.benchmarkTimestamp.string(from: Date()),
+            totalTimeSeconds: totalBenchmarkTimeSeconds
         )
benchmarkResults = results benchmarkResults = results
@@ -417,22 +430,22 @@ class MemoryMonitor: ObservableObject {
         maxMemoryKb = max(maxMemoryKb, memoryUsage)
     }
-    /// Gets current memory usage from system using mach task info
+    /// Gets current memory usage from system using task_vm_info for physical footprint
     private func getCurrentMemoryUsage() -> Int64 {
-        var info = mach_task_basic_info()
-        var count = mach_msg_type_number_t(MemoryLayout<mach_task_basic_info>.size) / 4
+        var info = task_vm_info_data_t()
+        var count = mach_msg_type_number_t(MemoryLayout<task_vm_info_data_t>.size) / UInt32(MemoryLayout<integer_t>.size)
         let kerr: kern_return_t = withUnsafeMutablePointer(to: &info) {
             $0.withMemoryRebound(to: integer_t.self, capacity: 1) {
                 task_info(mach_task_self_,
-                          task_flavor_t(MACH_TASK_BASIC_INFO),
+                          task_flavor_t(TASK_VM_INFO),
                           $0,
                           &count)
             }
         }
         if kerr == KERN_SUCCESS {
-            return Int64(info.resident_size) / 1024 // Convert to KB
+            return Int64(info.phys_footprint) / 1024 // Convert to KB
         } else {
             return 0
         }
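Note: this hunk swaps mach_task_basic_info.resident_size for task_vm_info_data_t.phys_footprint, the physical-footprint figure Xcode's memory gauge reports and the value iOS uses for memory-limit accounting, so peak-memory numbers in benchmark results better match what users see. A standalone sketch of the same query is below for quick experimentation outside the class; the helper name is illustrative and the code assumes a Darwin platform.

import Darwin

// Returns the app's physical memory footprint in KB, or nil if the mach call fails.
// Mirrors the approach the diff adopts inside MemoryMonitor.
func currentPhysFootprintKb() -> Int64? {
    var info = task_vm_info_data_t()
    var count = mach_msg_type_number_t(MemoryLayout<task_vm_info_data_t>.size) / UInt32(MemoryLayout<integer_t>.size)
    let kerr = withUnsafeMutablePointer(to: &info) {
        $0.withMemoryRebound(to: integer_t.self, capacity: Int(count)) {
            task_info(mach_task_self_, task_flavor_t(TASK_VM_INFO), $0, &count)
        }
    }
    guard kerr == KERN_SUCCESS else { return nil }
    return Int64(info.phys_footprint) / 1024
}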

View File

@ -15,7 +15,7 @@ struct ModelSelectionCard: View {
var body: some View { var body: some View {
VStack(alignment: .leading, spacing: 16) { VStack(alignment: .leading, spacing: 16) {
HStack { HStack {
Text("Select Model") Text(String(localized: "Select Model"))
.font(.title3) .font(.title3)
.fontWeight(.semibold) .fontWeight(.semibold)
.foregroundColor(.primary) .foregroundColor(.primary)
@ -27,7 +27,7 @@ struct ModelSelectionCard: View {
HStack { HStack {
ProgressView() ProgressView()
.scaleEffect(0.8) .scaleEffect(0.8)
Text("Loading models...") Text(String(localized: "Loading models..."))
.font(.subheadline) .font(.subheadline)
.foregroundColor(.secondary) .foregroundColor(.secondary)
} }
@ -56,7 +56,7 @@ struct ModelSelectionCard: View {
private var modelDropdownMenu: some View { private var modelDropdownMenu: some View {
Menu { Menu {
if viewModel.availableModels.isEmpty { if viewModel.availableModels.isEmpty {
Button("No models available") { Button(String(localized: "No models available")) {
// Placeholder - no action // Placeholder - no action
} }
.disabled(true) .disabled(true)
@ -69,7 +69,7 @@ struct ModelSelectionCard: View {
VStack(alignment: .leading, spacing: 2) { VStack(alignment: .leading, spacing: 2) {
Text(model.modelName) Text(model.modelName)
.font(.system(size: 14, weight: .medium)) .font(.system(size: 14, weight: .medium))
Text("Local") Text(String(localized: "Local"))
.font(.caption) .font(.caption)
.foregroundColor(.secondary) .foregroundColor(.secondary)
} }
@ -91,7 +91,7 @@ struct ModelSelectionCard: View {
Circle() Circle()
.fill(Color.benchmarkSuccess) .fill(Color.benchmarkSuccess)
.frame(width: 6, height: 6) .frame(width: 6, height: 6)
Text("Ready") Text(String(localized: "Ready"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSuccess) .foregroundColor(.benchmarkSuccess)
} }
@ -103,7 +103,7 @@ struct ModelSelectionCard: View {
} }
} }
} else { } else {
Text("Tap to select a model for testing") Text(String(localized: "Tap to select a model for testing"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
} }
@ -202,12 +202,12 @@ struct ModelSelectionCard: View {
private var statusMessages: some View { private var statusMessages: some View {
Group { Group {
if viewModel.selectedModel == nil { if viewModel.selectedModel == nil {
Text("Start benchmark after selecting your model") Text(String(localized: "Start benchmark after selecting your model"))
.font(.caption) .font(.caption)
.foregroundColor(.orange) .foregroundColor(.orange)
.padding(.horizontal, 16) .padding(.horizontal, 16)
} else if viewModel.availableModels.isEmpty { } else if viewModel.availableModels.isEmpty {
Text("No local models found. Please download a model first.") Text(String(localized: "No local models found. Please download a model first."))
.font(.caption) .font(.caption)
.foregroundColor(.orange) .foregroundColor(.orange)
.padding(.horizontal, 16) .padding(.horizontal, 16)

View File

@ -78,17 +78,17 @@ struct PerformanceMetricView: View {
HStack(spacing: 12) { HStack(spacing: 12) {
PerformanceMetricView( PerformanceMetricView(
icon: "speedometer", icon: "speedometer",
title: "Prefill Speed", title: String(localized: "Prefill Speed"),
value: "1024.5 t/s", value: "1024.5 t/s",
subtitle: "Tokens per second", subtitle: String(localized: "Tokens per second"),
color: .benchmarkGradientStart color: .benchmarkGradientStart
) )
PerformanceMetricView( PerformanceMetricView(
icon: "gauge", icon: "gauge",
title: "Decode Speed", title: String(localized: "Decode Speed"),
value: "109.8 t/s", value: "109.8 t/s",
subtitle: "Generation rate", subtitle: String(localized: "Generation rate"),
color: .benchmarkGradientEnd color: .benchmarkGradientEnd
) )
} }
@ -96,17 +96,17 @@ struct PerformanceMetricView: View {
HStack(spacing: 12) { HStack(spacing: 12) {
PerformanceMetricView( PerformanceMetricView(
icon: "memorychip", icon: "memorychip",
title: "Memory Usage", title: String(localized: "Memory Usage"),
value: "1.2 GB", value: "1.2 GB",
subtitle: "Peak memory", subtitle: String(localized: "Peak memory"),
color: .benchmarkWarning color: .benchmarkWarning
) )
PerformanceMetricView( PerformanceMetricView(
icon: "clock", icon: "clock",
title: "Total Time", title: String(localized: "Total Time"),
value: "2.456s", value: "2.456s",
subtitle: "Complete duration", subtitle: String(localized: "Complete duration"),
color: .benchmarkSuccess color: .benchmarkSuccess
) )
} }

View File

@ -60,12 +60,12 @@ struct ProgressCard: View {
} }
VStack(alignment: .leading, spacing: 2) { VStack(alignment: .leading, spacing: 2) {
Text("Test Progress") Text(String(localized: "Test Progress"))
.font(.title3) .font(.title3)
.fontWeight(.semibold) .fontWeight(.semibold)
.foregroundColor(.primary) .foregroundColor(.primary)
Text("Running performance tests") Text(String(localized: "Running performance tests"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
} }
@ -79,7 +79,7 @@ struct ProgressCard: View {
.fontWeight(.bold) .fontWeight(.bold)
.foregroundColor(.benchmarkAccent) .foregroundColor(.benchmarkAccent)
Text("Complete") Text(String(localized: "Complete"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
} }
@ -156,7 +156,7 @@ struct ProgressCard: View {
private var fallbackProgress: some View { private var fallbackProgress: some View {
VStack(alignment: .leading, spacing: 8) { VStack(alignment: .leading, spacing: 8) {
Text("Progress") Text(String(localized: "Progress"))
.font(.headline) .font(.headline)
ProgressView() ProgressView()
.progressViewStyle(LinearProgressViewStyle()) .progressViewStyle(LinearProgressViewStyle())

View File

@ -34,7 +34,7 @@ struct ResultsCard: View {
private var infoHeader: some View { private var infoHeader: some View {
let statistics = BenchmarkResultsHelper.shared.processTestResults(results.testResults) let statistics = BenchmarkResultsHelper.shared.processTestResults(results.testResults, totalTimeSeconds: results.totalTimeSeconds)
return VStack(alignment: .leading, spacing: 8) { return VStack(alignment: .leading, spacing: 8) {
Text(results.modelDisplayName) Text(results.modelDisplayName)
@ -43,7 +43,7 @@ struct ResultsCard: View {
.font(.subheadline) .font(.subheadline)
.foregroundColor(.secondary) .foregroundColor(.secondary)
Text("Benchmark Config") Text(String(localized: "Benchmark Config"))
.font(.headline) .font(.headline)
Text(statistics.configText) Text(statistics.configText)
.font(.subheadline) .font(.subheadline)
@ -73,12 +73,12 @@ struct ResultsCard: View {
} }
VStack(alignment: .leading, spacing: 2) { VStack(alignment: .leading, spacing: 2) {
Text("Benchmark Results") Text(String(localized: "Benchmark Results"))
.font(.title3) .font(.title3)
.fontWeight(.semibold) .fontWeight(.semibold)
.foregroundColor(.primary) .foregroundColor(.primary)
Text("Performance analysis complete") Text(String(localized: "Performance analysis complete"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
} }
@ -94,7 +94,7 @@ struct ResultsCard: View {
.font(.title2) .font(.title2)
.foregroundColor(.benchmarkSuccess) .foregroundColor(.benchmarkSuccess)
Text("Share") Text(String(localized: "Share"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
} }
@ -105,24 +105,24 @@ struct ResultsCard: View {
private var performanceMetrics: some View { private var performanceMetrics: some View {
let statistics = BenchmarkResultsHelper.shared.processTestResults(results.testResults) let statistics = BenchmarkResultsHelper.shared.processTestResults(results.testResults, totalTimeSeconds: results.totalTimeSeconds)
return VStack(spacing: 16) { return VStack(spacing: 16) {
HStack(spacing: 12) { HStack(spacing: 12) {
if let prefillStats = statistics.prefillStats { if let prefillStats = statistics.prefillStats {
PerformanceMetricView( PerformanceMetricView(
icon: "speedometer", icon: "speedometer",
title: "Prefill Speed", title: String(localized: "Prefill Speed"),
value: BenchmarkResultsHelper.shared.formatSpeedStatisticsLine(prefillStats), value: BenchmarkResultsHelper.shared.formatSpeedStatisticsLine(prefillStats),
subtitle: "Tokens per second", subtitle: String(localized: "Tokens per second"),
color: .benchmarkGradientStart color: .benchmarkGradientStart
) )
} else { } else {
PerformanceMetricView( PerformanceMetricView(
icon: "speedometer", icon: "speedometer",
title: "Prefill Speed", title: String(localized: "Prefill Speed"),
value: "N/A", value: String(localized: "N/A"),
subtitle: "Tokens per second", subtitle: String(localized: "Tokens per second"),
color: .benchmarkGradientStart color: .benchmarkGradientStart
) )
} }
@ -130,17 +130,17 @@ struct ResultsCard: View {
if let decodeStats = statistics.decodeStats { if let decodeStats = statistics.decodeStats {
PerformanceMetricView( PerformanceMetricView(
icon: "gauge", icon: "gauge",
title: "Decode Speed", title: String(localized: "Decode Speed"),
value: BenchmarkResultsHelper.shared.formatSpeedStatisticsLine(decodeStats), value: BenchmarkResultsHelper.shared.formatSpeedStatisticsLine(decodeStats),
subtitle: "Generation rate", subtitle: String(localized: "Generation rate"),
color: .benchmarkGradientEnd color: .benchmarkGradientEnd
) )
} else { } else {
PerformanceMetricView( PerformanceMetricView(
icon: "gauge", icon: "gauge",
title: "Decode Speed", title: String(localized: "Decode Speed"),
value: "N/A", value: String(localized: "N/A"),
subtitle: "Generation rate", subtitle: String(localized: "Generation rate"),
color: .benchmarkGradientEnd color: .benchmarkGradientEnd
) )
} }
@ -155,17 +155,17 @@ struct ResultsCard: View {
PerformanceMetricView( PerformanceMetricView(
icon: "memorychip", icon: "memorychip",
title: "Memory Usage", title: String(localized: "Memory Usage"),
value: memoryInfo.valueText, value: memoryInfo.valueText,
subtitle: "Peak memory", subtitle: String(localized: "Peak memory"),
color: .benchmarkWarning color: .benchmarkWarning
) )
PerformanceMetricView( PerformanceMetricView(
icon: "clock", icon: "clock",
title: "Total Tokens", title: String(localized: "Total Time"),
value: "\(statistics.totalTokensProcessed)", value: String(format: "%.2f s", statistics.totalTimeSeconds),
subtitle: "Complete duration", subtitle: String(localized: "Complete duration"),
color: .benchmarkSuccess color: .benchmarkSuccess
) )
} }
@ -176,9 +176,9 @@ struct ResultsCard: View {
return VStack(alignment: .leading, spacing: 12) { return VStack(alignment: .leading, spacing: 12) {
VStack(spacing: 8) { VStack(spacing: 8) {
HStack { HStack {
Text("Completed") Text(String(localized: "Completed"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
Spacer() Spacer()
Text(results.timestamp) Text(results.timestamp)
.font(.caption) .font(.caption)
@ -186,9 +186,9 @@ struct ResultsCard: View {
} }
HStack { HStack {
Text("Powered By MNN") Text(String(localized: "Powered By MNN"))
.font(.caption) .font(.caption)
.foregroundColor(.benchmarkSecondary) .foregroundColor(.benchmarkSecondary)
Spacer() Spacer()
Text(verbatim: "https://github.com/alibaba/MNN") Text(verbatim: "https://github.com/alibaba/MNN")
.font(.caption) .font(.caption)
@ -238,7 +238,7 @@ struct ResultsCard: View {
/// Formats benchmark results into shareable text format with performance metrics and hashtags /// Formats benchmark results into shareable text format with performance metrics and hashtags
private func formatResultsForSharing() -> String { private func formatResultsForSharing() -> String {
let statistics = BenchmarkResultsHelper.shared.processTestResults(results.testResults) let statistics = BenchmarkResultsHelper.shared.processTestResults(results.testResults, totalTimeSeconds: results.totalTimeSeconds)
let deviceInfo = BenchmarkResultsHelper.shared.getDeviceInfo() let deviceInfo = BenchmarkResultsHelper.shared.getDeviceInfo()
var shareText = """ var shareText = """

View File

@ -31,7 +31,7 @@ struct StatusCard: View {
} }
VStack(alignment: .leading, spacing: 4) { VStack(alignment: .leading, spacing: 4) {
Text("Status Update") Text(String(localized: "Status Update"))
.font(.subheadline) .font(.subheadline)
.fontWeight(.semibold) .fontWeight(.semibold)
.foregroundColor(.primary) .foregroundColor(.primary)

View File

@ -51,16 +51,16 @@ struct BenchmarkView: View {
.padding(.vertical, 16) .padding(.vertical, 16)
} }
} }
.alert("Stop Benchmark", isPresented: $viewModel.showStopConfirmation) { .alert(String(localized: "Stop Benchmark"), isPresented: $viewModel.showStopConfirmation) {
Button("Yes", role: .destructive) { Button(String(localized: "Yes"), role: .destructive) {
viewModel.onStopBenchmarkTapped() viewModel.onStopBenchmarkTapped()
} }
Button("No", role: .cancel) { } Button(String(localized: "No"), role: .cancel) { }
} message: { } message: {
Text("Are you sure you want to stop the benchmark test?") Text(String(localized: "Are you sure you want to stop the benchmark test?"))
} }
.alert("Error", isPresented: $viewModel.showError) { .alert(String(localized: "Error"), isPresented: $viewModel.showError) {
Button("OK") { Button(String(localized: "OK")) {
viewModel.dismissError() viewModel.dismissError()
} }
} message: { } message: {

View File

@ -42,7 +42,7 @@ struct LocalModelListView: View {
} }
} }
.listStyle(.plain) .listStyle(.plain)
.searchable(text: $localSearchText, prompt: "搜索本地模型...") .searchable(text: $localSearchText, prompt: "搜索模型...")
.refreshable { .refreshable {
await viewModel.fetchModels() await viewModel.fetchModels()
} }

View File

@ -0,0 +1,25 @@
//
// MainTabItem.swift
// MNNLLMiOS
//
// Created by () on 2025/9/3.
//
import SwiftUI
struct MainTabItem: View {
let imageName: String
let title: String
let isSelected: Bool
var body: some View {
VStack {
Image(imageName)
.resizable()
.aspectRatio(contentMode: .fit)
.frame(width: 10, height: 10)
Text(title)
}
.foregroundColor(isSelected ? .primaryPurple : .gray)
}
}
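For reference, a minimal sketch of how this label view plugs into a `.tabItem`; the asset names and tag here are placeholders, and the real wiring appears in the MainTabView diff further down.

```swift
import SwiftUI

// Minimal usage sketch ("home"/"homeFill" are placeholder asset names).
struct MainTabItemUsageSketch: View {
    @State private var selectedTab = 0

    var body: some View {
        TabView(selection: $selectedTab) {
            Text("Local models")
                .tabItem {
                    MainTabItem(
                        imageName: selectedTab == 0 ? "homeFill" : "home",
                        title: "My Model",
                        isSelected: selectedTab == 0
                    )
                }
                .tag(0)
        }
    }
}
```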

View File

@ -27,7 +27,7 @@ struct MainTabView: View {
private var titles: [String] { private var titles: [String] {
[ [
NSLocalizedString("Local Model", comment: "本地模型标签"), NSLocalizedString("My Model", comment: "我的模型标签"),
NSLocalizedString("Model Market", comment: "模型市场标签"), NSLocalizedString("Model Market", comment: "模型市场标签"),
NSLocalizedString("Benchmark", comment: "基准测试标签") NSLocalizedString("Benchmark", comment: "基准测试标签")
] ]
@ -40,21 +40,21 @@ struct MainTabView: View {
createTabContent( createTabContent(
content: LocalModelListView(viewModel: modelListViewModel), content: LocalModelListView(viewModel: modelListViewModel),
title: titles[0], title: titles[0],
icon: "house.fill", icon: "home",
tag: 0 tag: 0
) )
createTabContent( createTabContent(
content: ModelListView(viewModel: modelListViewModel), content: ModelListView(viewModel: modelListViewModel),
title: titles[1], title: titles[1],
icon: "doc.text.fill", icon: "market",
tag: 1 tag: 1
) )
createTabContent( createTabContent(
content: BenchmarkView(), content: BenchmarkView(),
title: titles[2], title: titles[2],
icon: "clock.fill", icon: "benchmark",
tag: 2 tag: 2
) )
} }
@ -143,17 +143,26 @@ struct MainTabView: View {
showHistoryButton: $showHistoryButton showHistoryButton: $showHistoryButton
) )
} }
.navigationDestination(isPresented: $navigateToChat) { .navigationDestination(isPresented: Binding(
get: { navigateToChat && selectedTab == tag },
set: { _ in navigateToChat = false }
)) {
chatDestination chatDestination
} }
.navigationDestination(isPresented: $navigateToSettings) { .navigationDestination(isPresented: Binding(
get: { navigateToSettings && selectedTab == tag },
set: { _ in navigateToSettings = false }
)) {
SettingsView() SettingsView()
} }
.toolbar((navigateToChat || navigateToSettings) ? .hidden : .visible, for: .tabBar) .toolbar((navigateToChat || navigateToSettings) ? .hidden : .visible, for: .tabBar)
} }
.tabItem { .tabItem {
Image(systemName: icon) MainTabItem(
Text(title) imageName: selectedTab == tag ? "\(icon)Fill" : icon,
title: title,
isSelected: selectedTab == tag
)
} }
.tag(tag) .tag(tag)
} }
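The bindings above gate navigation on the currently selected tab, so a push in one tab no longer triggers the same destination in every tab, and dismissal simply clears the flag. A standalone sketch of the same pattern, with hypothetical names:

```swift
import SwiftUI

// Sketch of the tab-scoped navigation pattern used above (names are illustrative).
struct TabScopedNavigationSketch: View {
    @State private var selectedTab = 0
    @State private var navigateToDetail = false

    private func destinationBinding(for tag: Int) -> Binding<Bool> {
        Binding(
            get: { navigateToDetail && selectedTab == tag },   // only the active tab presents
            set: { _ in navigateToDetail = false }             // dismissal clears the flag
        )
    }

    var body: some View {
        TabView(selection: $selectedTab) {
            NavigationStack {
                Button("Open detail") { navigateToDetail = true }
                    .navigationDestination(isPresented: destinationBinding(for: 0)) {
                        Text("Detail")
                    }
            }
            .tag(0)
        }
    }
}
```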

View File

@ -7,6 +7,7 @@
import Foundation import Foundation
import SwiftUI import SwiftUI
import Combine
class ModelListViewModel: ObservableObject { class ModelListViewModel: ObservableObject {
// MARK: - Published Properties // MARK: - Published Properties
@ -21,8 +22,9 @@ class ModelListViewModel: ObservableObject {
@Published private(set) var currentlyDownloading: String? @Published private(set) var currentlyDownloading: String?
// MARK: - Private Properties // MARK: - Private Properties
private let modelClient = ModelClient() private let modelClient = ModelClient.shared
private let pinnedModelKey = "com.mnnllm.pinnedModelIds" private let pinnedModelKey = "com.mnnllm.pinnedModelIds"
private var cancellables = Set<AnyCancellable>()
// MARK: - Model Data Access // MARK: - Model Data Access
@ -49,6 +51,17 @@ class ModelListViewModel: ObservableObject {
Task { @MainActor in Task { @MainActor in
await fetchModels() await fetchModels()
} }
NotificationCenter.default
.publisher(for: .modelUsageUpdated)
.sink { [weak self] notification in
if let modelName = notification.userInfo?["modelName"] as? String {
Task { @MainActor in
self?.updateModelLastUsed(modelName: modelName)
}
}
}
.store(in: &cancellables)
} }
// MARK: - Model Data Management // MARK: - Model Data Management
@ -357,7 +370,10 @@ class ModelListViewModel: ObservableObject {
await MainActor.run { await MainActor.run {
guard currentlyDownloading == nil else { return } guard currentlyDownloading == nil else { return }
currentlyDownloading = model.id currentlyDownloading = model.id
downloadProgress[model.id] = 0
if downloadProgress[model.id] == nil {
downloadProgress[model.id] = 0
}
} }
do { do {
@ -372,6 +388,9 @@ class ModelListViewModel: ObservableObject {
self.models[index].isDownloaded = true self.models[index].isDownloaded = true
ModelStorageManager.shared.markModelAsDownloaded(model.modelName) ModelStorageManager.shared.markModelAsDownloaded(model.modelName)
} }
self.downloadProgress.removeValue(forKey: model.id)
self.currentlyDownloading = nil
} }
// Calculate and cache size for newly downloaded model // Calculate and cache size for newly downloaded model
@ -392,26 +411,22 @@ class ModelListViewModel: ObservableObject {
if case ModelScopeError.downloadCancelled = error { if case ModelScopeError.downloadCancelled = error {
print("Download was cancelled") print("Download was cancelled")
} else { } else {
self.downloadProgress.removeValue(forKey: model.id)
self.showError = true self.showError = true
self.errorMessage = "Failed to download model: \(error.localizedDescription)" self.errorMessage = "Failed to download model: \(error.localizedDescription)"
} }
self.currentlyDownloading = nil
} }
} }
await MainActor.run {
self.currentlyDownloading = nil
self.downloadProgress.removeValue(forKey: model.id)
}
} }
func cancelDownload() async { func cancelDownload() async {
let modelId = await MainActor.run { currentlyDownloading } let modelId = await MainActor.run { currentlyDownloading }
if let modelId = modelId { if let modelId = modelId {
await modelClient.cancelDownload() await modelClient.cancelDownload(for: modelId)
await MainActor.run { await MainActor.run {
self.downloadProgress.removeValue(forKey: modelId)
self.currentlyDownloading = nil self.currentlyDownloading = nil
} }
@ -488,4 +503,20 @@ class ModelListViewModel: ObservableObject {
} }
} }
} }
@MainActor
private func updateModelLastUsed(modelName: String) {
if let index = models.firstIndex(where: { $0.modelName == modelName }) {
if let lastUsed = ModelStorageManager.shared.getLastUsed(for: modelName) {
models[index].lastUsedAt = lastUsed
sortModels(fetchedModels: &models)
}
}
}
}
// MARK: - Notification Names
extension Notification.Name {
static let modelUsageUpdated = Notification.Name("modelUsageUpdated")
} }
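Only the subscription lives in this view model; the matching post would come from wherever a model finishes being used. A minimal sketch of that call site (the function name is hypothetical, the userInfo key matches the subscriber above):

```swift
import Foundation

// Hypothetical call site: notify the model list that a model was just used.
func notifyModelUsed(_ modelName: String) {
    NotificationCenter.default.post(
        name: .modelUsageUpdated,
        object: nil,
        userInfo: ["modelName": modelName]   // key read by the subscriber above
    )
}
```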

View File

@ -9,7 +9,7 @@ import SwiftUI
struct HelpView: View { struct HelpView: View {
var body: some View { var body: some View {
WebView(url: URL(string: "https://github.com/alibaba/MNN")!) // ?tab=readme-ov-file#intro WebView(url: URL(string: "https://github.com/alibaba/MNN") ?? URL(fileURLWithPath: "/")) // ?tab=readme-ov-file#intro
.navigationTitle("Help") .navigationTitle("Help")
.navigationBarTitleDisplayMode(.inline) .navigationBarTitleDisplayMode(.inline)
} }

View File

@ -16,27 +16,48 @@ struct ModelListView: View {
@State private var selectedCategories: Set<String> = [] @State private var selectedCategories: Set<String> = []
@State private var selectedVendors: Set<String> = [] @State private var selectedVendors: Set<String> = []
@State private var showFilterMenu = false @State private var showFilterMenu = false
private let topID = "topID"
var body: some View { var body: some View {
ScrollView { ScrollViewReader { proxy in
LazyVStack(spacing: 0, pinnedViews: [.sectionHeaders]) { ScrollView {
Section { LazyVStack(spacing: 0, pinnedViews: [.sectionHeaders]) {
modelListSection
} header: { Color.clear.frame(height: 0).id(topID)
toolbarSection
Section {
modelListSection
} header: {
toolbarSection
}
} }
} }
} .searchable(text: $searchText, prompt: "Search models...")
.searchable(text: $searchText, prompt: "Search models...") .refreshable {
.refreshable { await viewModel.fetchModels()
await viewModel.fetchModels() }
} .alert("Error", isPresented: $viewModel.showError) {
.alert("Error", isPresented: $viewModel.showError) { Button("OK") {
Button("OK") { viewModel.dismissError()
viewModel.dismissError() }
} message: {
Text(viewModel.errorMessage)
}
// Auto-scroll to top when filters change to avoid blank screen when data shrinks
.onChange(of: selectedTags) { old, new in
withAnimation { proxy.scrollTo(topID, anchor: .top) }
}
.onChange(of: selectedCategories) { old, new in
withAnimation { proxy.scrollTo(topID, anchor: .top) }
}
.onChange(of: selectedVendors) { old, new in
withAnimation { proxy.scrollTo(topID, anchor: .top) }
}
.onChange(of: showFilterMenu) { old, new in
if old != new {
withAnimation { proxy.scrollTo(topID, anchor: .top) }
}
} }
} message: {
Text(viewModel.errorMessage)
} }
} }
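A condensed sketch of the scroll-to-top idea in isolation: an invisible anchor view at the top of the content plus `ScrollViewReader`, so the list jumps back to the top whenever a filter shrinks the data set (the filter state and rows below are placeholders):

```swift
import SwiftUI

struct ScrollToTopSketch: View {
    @State private var selectedTags: Set<String> = []
    private let topID = "topID"

    var body: some View {
        ScrollViewReader { proxy in
            ScrollView {
                LazyVStack {
                    Color.clear.frame(height: 0).id(topID)               // invisible top anchor
                    Button("Apply filter") { selectedTags.insert("llm") }
                    ForEach(0..<100, id: \.self) { Text("Row \($0)") }
                }
            }
            .onChange(of: selectedTags) { _, _ in                        // any filter change resets the scroll position
                withAnimation { proxy.scrollTo(topID, anchor: .top) }
            }
        }
    }
}
```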

View File

@ -28,13 +28,16 @@ import Foundation
/// ///
/// Usage: /// Usage:
/// ```swift /// ```swift
/// let client = ModelClient() /// let client = ModelClient.shared
/// let models = try await client.getModelInfo() /// let models = try await client.getModelInfo()
/// try await client.downloadModel(model: selectedModel) { progress in /// try await client.downloadModel(model: selectedModel) { progress in
/// print("Download progress: \(progress * 100)%") /// print("Download progress: \(progress * 100)%")
/// } /// }
/// ``` /// ```
class ModelClient { class ModelClient {
// MARK: - Singleton
static let shared = ModelClient()
// MARK: - Properties // MARK: - Properties
private let maxRetries = 5 private let maxRetries = 5
@ -46,8 +49,9 @@ class ModelClient {
// Debug flag to use local mock data instead of network API // Debug flag to use local mock data instead of network API
private let useLocalMockData = false private let useLocalMockData = false
private var currentDownloadManager: ModelDownloadManagerProtocol? private var downloadManagers: [String: ModelDownloadManagerProtocol] = [:]
private let downloadManagerFactory: ModelDownloadManagerFactory private let downloadManagerFactory: ModelDownloadManagerFactory
private let downloadManagerQueue = DispatchQueue(label: "com.mnn.downloadManager", attributes: .concurrent)
private lazy var baseURLString: String = { private lazy var baseURLString: String = {
switch ModelSourceManager.shared.selectedSource { switch ModelSourceManager.shared.selectedSource {
@ -58,14 +62,20 @@ class ModelClient {
} }
}() }()
/// Creates a ModelClient with dependency injection for download manager /// Private initializer for singleton pattern
/// ///
/// - Parameter downloadManagerFactory: Factory for creating download managers. /// - Parameter downloadManagerFactory: Factory for creating download managers.
/// Defaults to DefaultModelDownloadManagerFactory /// Defaults to LegacyModelDownloadManagerFactory
init(downloadManagerFactory: ModelDownloadManagerFactory = DefaultModelDownloadManagerFactory()) { private init(downloadManagerFactory: ModelDownloadManagerFactory = LegacyModelDownloadManagerFactory()) {
print("ModelClient singleton initialized")
self.downloadManagerFactory = downloadManagerFactory self.downloadManagerFactory = downloadManagerFactory
} }
deinit {
print("ModelClient deinit")
downloadManagers.removeAll()
}
/// Retrieves model information from the configured API endpoint /// Retrieves model information from the configured API endpoint
/// ///
/// This method fetches the latest model catalog from the network API. /// This method fetches the latest model catalog from the network API.
@ -140,15 +150,30 @@ class ModelClient {
} }
} }
/// Cancels the current download operation /// Cancels download for a specific model
func cancelDownload() async { /// - Parameter modelId: The ID of the model to cancel download for
switch ModelSourceManager.shared.selectedSource { func cancelDownload(for modelId: String) async {
case .modelScope, .modeler: downloadManagerQueue.sync {
await currentDownloadManager?.cancelDownload() if let downloadManager = downloadManagers[modelId] {
case .huggingFace: Task {
// TODO: await currentDownloadManager?.cancelDownload() await downloadManager.cancelDownload()
// try await mirrorHubApi. }
print("cant stop") }
}
}
/// Cancels all active downloads
func cancelAllDownloads() async {
let managers = downloadManagerQueue.sync {
return Array(downloadManagers.values)
}
await withTaskGroup(of: Void.self) { group in
for manager in managers {
group.addTask {
await manager.cancelDownload()
}
}
} }
} }
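Since managers are now keyed per model, cancellation can target one download while others continue. A small usage sketch (this mirrors the `cancelDownload()` change in ModelListViewModel above):

```swift
// Sketch: cancelling one download without touching the others.
func stopDownloading(modelId: String) async {
    await ModelClient.shared.cancelDownload(for: modelId)
}

// Sketch: tearing everything down, e.g. when leaving the download screen.
func stopAllDownloads() async {
    await ModelClient.shared.cancelAllDownloads()
}
```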
@ -161,14 +186,21 @@ class ModelClient {
private func downloadFromModelScope(_ model: ModelInfo, private func downloadFromModelScope(_ model: ModelInfo,
source: ModelSource, source: ModelSource,
progress: @escaping (Double) -> Void) async throws { progress: @escaping (Double) -> Void) async throws {
let downloadManager = downloadManagerQueue.sync {
currentDownloadManager = downloadManagerFactory.createDownloadManager( if let existingManager = downloadManagers[model.id] {
repoPath: model.id, return existingManager
source: .modelScope } else {
) let newManager = downloadManagerFactory.createDownloadManager(
repoPath: model.id,
source: .modelScope
)
downloadManagers[model.id] = newManager
return newManager
}
}
do { do {
try await currentDownloadManager?.downloadModel( try await downloadManager.downloadModel(
to: "huggingface/models/taobao-mnn", to: "huggingface/models/taobao-mnn",
modelId: model.id, modelId: model.id,
modelName: model.modelName modelName: model.modelName
@ -177,14 +209,30 @@ class ModelClient {
progress(fileProgress) progress(fileProgress)
} }
} }
await cleanupDownloadManager(for: model.id)
} catch { } catch {
if case ModelScopeError.downloadCancelled = error { if case ModelScopeError.downloadCancelled = error {
throw ModelScopeError.downloadCancelled throw ModelScopeError.downloadCancelled
} else { } else {
await cleanupDownloadManager(for: model.id)
throw NetworkError.downloadFailed throw NetworkError.downloadFailed
} }
} }
} }
private func cleanupDownloadManager(for modelId: String) async {
_ = downloadManagerQueue.sync {
downloadManagers.removeValue(forKey: modelId)
}
}
func getActiveDownloadersCount() -> Int {
return downloadManagerQueue.sync {
return downloadManagers.count
}
}
/// Downloads model from HuggingFace platform with optimized progress updates /// Downloads model from HuggingFace platform with optimized progress updates
/// ///
@ -205,7 +253,7 @@ class ModelClient {
var lastUpdateTime = Date() var lastUpdateTime = Date()
var lastProgress: Double = 0.0 var lastProgress: Double = 0.0
let progressUpdateInterval: TimeInterval = 0.1 // Limit update frequency to every 100ms let progressUpdateInterval: TimeInterval = 0.1 // Limit update frequency to every 100ms
let progressThreshold: Double = 0.01 // Progress change threshold of 1% let progressThreshold: Double = 0.001 // Progress change threshold of 0.1%
try await mirrorHubApi.snapshot(from: repo, matching: modelFiles) { fileProgress in try await mirrorHubApi.snapshot(from: repo, matching: modelFiles) { fileProgress in
let currentProgress = fileProgress.fractionCompleted let currentProgress = fileProgress.fractionCompleted

View File

@ -46,11 +46,32 @@ struct ChunkInfo {
struct DownloadProgress { struct DownloadProgress {
var totalBytes: Int64 = 0 var totalBytes: Int64 = 0
var downloadedBytes: Int64 = 0
var activeDownloads: Int = 0 var activeDownloads: Int = 0
var completedFiles: Int = 0 var completedFiles: Int = 0
var totalFiles: Int = 0 var totalFiles: Int = 0
// Track individual file progress
var fileProgress: [String: FileDownloadProgress] = [:]
var lastReportedProgress: Double = 0.0
var progress: Double {
guard totalBytes > 0 else { return 0.0 }
let totalDownloadedBytes = fileProgress.values.reduce(0) { sum, fileProgress in
return sum + fileProgress.downloadedBytes
}
let calculatedProgress = Double(totalDownloadedBytes) / Double(totalBytes)
return min(calculatedProgress, 1.0) // Ensure progress never exceeds 100%
}
}
struct FileDownloadProgress {
let fileName: String
let totalBytes: Int64
var downloadedBytes: Int64 = 0
var isCompleted: Bool = false
var progress: Double { var progress: Double {
guard totalBytes > 0 else { return 0.0 } guard totalBytes > 0 else { return 0.0 }
return Double(downloadedBytes) / Double(totalBytes) return Double(downloadedBytes) / Double(totalBytes)
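Overall progress is now derived from the per-file entries rather than a single running byte counter, which keeps resumed and already-complete files from being double counted. A small worked sketch using the structs above:

```swift
// Worked sketch: two files, one finished and one halfway, against a 300-byte total.
var progress = DownloadProgress()
progress.totalBytes = 300
progress.fileProgress["a.bin"] = FileDownloadProgress(fileName: "a.bin", totalBytes: 100, downloadedBytes: 100, isCompleted: true)
progress.fileProgress["b.bin"] = FileDownloadProgress(fileName: "b.bin", totalBytes: 200, downloadedBytes: 100, isCompleted: false)

print(progress.progress)   // (100 + 100) / 300 ≈ 0.667, clamped to at most 1.0
```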

View File

@ -100,7 +100,8 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
self.session = URLSession(configuration: sessionConfig) self.session = URLSession(configuration: sessionConfig)
self.concurrencyManager = DynamicConcurrencyManager(config: concurrencyConfig) self.concurrencyManager = DynamicConcurrencyManager(config: concurrencyConfig)
self.downloadSemaphore = AsyncSemaphore(value: config.maxConcurrentDownloads) self.downloadSemaphore = AsyncSemaphore(value: config.maxConcurrentDownloads)
print("ModelClient init")
ModelDownloadLogger.isEnabled = enableLogging ModelDownloadLogger.isEnabled = enableLogging
} }
@ -216,6 +217,10 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
let subFiles = try await fetchFileList(root: file.path, revision: "") let subFiles = try await fetchFileList(root: file.path, revision: "")
try await processFiles(subFiles, destinationPath: newPath) try await processFiles(subFiles, destinationPath: newPath)
} else if file.type == "blob" { } else if file.type == "blob" {
// Initialize progress tracking for all files
await initializeFileProgress(fileName: file.name, totalBytes: Int64(file.size))
progress.totalBytes += Int64(file.size)
if !storage.isFileDownloaded(file, at: destinationPath) { if !storage.isFileDownloaded(file, at: destinationPath) {
var task = DownloadTask( var task = DownloadTask(
file: file, file: file,
@ -230,9 +235,14 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
} }
downloadQueue.append(task) downloadQueue.append(task)
progress.totalBytes += Int64(file.size)
} else { } else {
progress.downloadedBytes += Int64(file.size) // File already downloaded, mark as completed in progress tracking
if var fileProgress = progress.fileProgress[file.name] {
fileProgress.downloadedBytes = fileProgress.totalBytes
fileProgress.isCompleted = true
progress.fileProgress[file.name] = fileProgress
}
ModelDownloadLogger.info("File \(file.name) already exists, skipping download")
} }
} }
} }
@ -261,9 +271,12 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
let startOffset = Int64(i) * chunkSize let startOffset = Int64(i) * chunkSize
let endOffset = min(startOffset + chunkSize - 1, fileSize - 1) let endOffset = min(startOffset + chunkSize - 1, fileSize - 1)
let modelHash = repoPath.hash let modelHash = repoPath.stableHash
let fileHash = file.path.hash let fileHash = file.path.stableHash
let tempURL = FileManager.default.temporaryDirectory
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let downloadsURL = documentsURL.appendingPathComponent(".downloads", isDirectory: true)
let tempURL = downloadsURL
.appendingPathComponent("model_\(modelHash)_file_\(fileHash)_chunk_\(i)_\(file.name.sanitizedPath).tmp") .appendingPathComponent("model_\(modelHash)_file_\(fileHash)_chunk_\(i)_\(file.name.sanitizedPath).tmp")
// Check if chunk already exists and calculate downloaded bytes // Check if chunk already exists and calculate downloaded bytes
@ -294,6 +307,31 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
return chunks return chunks
} }
/// Calculate initial progress from existing downloaded files and chunks
private func calculateInitialProgress() async {
for task in downloadQueue {
if task.isChunked {
// For chunked files, sum up downloaded bytes from all chunks
let chunkBytes = task.chunks.reduce(0) { total, chunk in
return total + (chunk.isCompleted ? (chunk.endOffset - chunk.startOffset + 1) : chunk.downloadedBytes)
}
// Update file progress with chunk data
if var fileProgress = progress.fileProgress[task.file.name] {
fileProgress.downloadedBytes = chunkBytes
progress.fileProgress[task.file.name] = fileProgress
}
}
// For non-chunked files, if they exist, they would not be in downloadQueue
}
let totalDownloadedBytes = progress.fileProgress.values.reduce(0) { sum, fileProgress in
return sum + fileProgress.downloadedBytes
}
ModelDownloadLogger.info("Initial downloaded bytes: \(totalDownloadedBytes)")
}
// MARK: - Download Execution // MARK: - Download Execution
/// Executes download tasks with dynamic concurrency management /// Executes download tasks with dynamic concurrency management
@ -304,6 +342,9 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
/// ///
/// - Throws: ModelScopeError if downloads fail or are cancelled /// - Throws: ModelScopeError if downloads fail or are cancelled
private func executeDownloads() async throws { private func executeDownloads() async throws {
// Calculate initial downloaded bytes from existing files and chunks
await calculateInitialProgress()
await withTaskGroup(of: Void.self) { group in await withTaskGroup(of: Void.self) { group in
for task in downloadQueue { for task in downloadQueue {
if isCancelled { break } if isCancelled { break }
@ -350,12 +391,11 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
ModelDownloadLogger.info("Using optimal concurrency: \(concurrencyCount) for \(task.chunks.count) chunks") ModelDownloadLogger.info("Using optimal concurrency: \(concurrencyCount) for \(task.chunks.count) chunks")
// Check if any chunks are already completed and update progress // Check if any chunks are already completed and log progress (but don't update global progress yet)
let completedBytes = task.chunks.reduce(0) { total, chunk in let completedBytes = task.chunks.reduce(0) { total, chunk in
return total + (chunk.isCompleted ? (chunk.endOffset - chunk.startOffset + 1) : chunk.downloadedBytes) return total + (chunk.isCompleted ? (chunk.endOffset - chunk.startOffset + 1) : chunk.downloadedBytes)
} }
if completedBytes > 0 { if completedBytes > 0 {
await updateDownloadProgress(bytes: completedBytes)
ModelDownloadLogger.info("Found \(completedBytes) bytes already downloaded for \(task.file.name)") ModelDownloadLogger.info("Found \(completedBytes) bytes already downloaded for \(task.file.name)")
} }
@ -435,22 +475,26 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
try fileHandle.seekToEnd() try fileHandle.seekToEnd()
} }
var bytesCount = 0 var buffer = Data()
buffer.reserveCapacity(512 * 1024) // Reserve 512KB buffer for chunks
for try await byte in asyncBytes { for try await byte in asyncBytes {
if isCancelled { throw ModelScopeError.downloadCancelled } if isCancelled { throw ModelScopeError.downloadCancelled }
try fileHandle.write(contentsOf: [byte]) buffer.append(byte)
bytesCount += 1
if bytesCount >= 64 * 1024 { // Write in larger chunks to reduce I/O operations
await updateDownloadProgress(bytes: Int64(bytesCount)) if buffer.count >= 128 * 1024 { // 128KB chunks for chunk downloads
bytesCount = 0 try fileHandle.write(contentsOf: buffer)
await updateFileProgress(fileName: file.name, bytes: Int64(buffer.count))
buffer.removeAll(keepingCapacity: true)
} }
} }
if bytesCount > 0 { // Write remaining buffer
await updateDownloadProgress(bytes: Int64(bytesCount)) if !buffer.isEmpty {
try fileHandle.write(contentsOf: buffer)
await updateFileProgress(fileName: file.name, bytes: Int64(buffer.count))
} }
ModelDownloadLogger.info("Chunk \(chunk.index) downloaded successfully") ModelDownloadLogger.info("Chunk \(chunk.index) downloaded successfully")
@ -498,9 +542,15 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
let destination = URL(fileURLWithPath: task.destinationPath) let destination = URL(fileURLWithPath: task.destinationPath)
.appendingPathComponent(file.name.sanitizedPath) .appendingPathComponent(file.name.sanitizedPath)
let modelHash = repoPath.hash let modelHash = repoPath.stableHash
let fileHash = file.path.hash let fileHash = file.path.stableHash
let tempURL = FileManager.default.temporaryDirectory
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let downloadsURL = documentsURL.appendingPathComponent(".downloads", isDirectory: true)
try? fileManager.createDirectory(at: downloadsURL, withIntermediateDirectories: true, attributes: nil)
let tempURL = downloadsURL
.appendingPathComponent("model_\(modelHash)_file_\(fileHash)_\(file.name.sanitizedPath).tmp") .appendingPathComponent("model_\(modelHash)_file_\(fileHash)_\(file.name.sanitizedPath).tmp")
var lastError: Error? var lastError: Error?
@ -534,27 +584,32 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
} }
var downloadedBytes: Int64 = resumeOffset var downloadedBytes: Int64 = resumeOffset
var bytesCount = 0 var buffer = Data()
buffer.reserveCapacity(1024 * 1024) // Reserve 1MB buffer
for try await byte in asyncBytes { for try await byte in asyncBytes {
if isCancelled { throw ModelScopeError.downloadCancelled } if isCancelled { throw ModelScopeError.downloadCancelled }
try fileHandle.write(contentsOf: [byte]) buffer.append(byte)
downloadedBytes += 1
bytesCount += 1
// Update progress less frequently // Write in larger chunks to reduce I/O operations
if bytesCount >= 64 * 1024 { if buffer.count >= 256 * 1024 { // 256KB chunks
await updateDownloadProgress(bytes: Int64(bytesCount)) try fileHandle.write(contentsOf: buffer)
bytesCount = 0 downloadedBytes += Int64(buffer.count)
await updateFileProgress(fileName: file.name, bytes: Int64(buffer.count))
buffer.removeAll(keepingCapacity: true)
} }
} }
// Final progress update // Write remaining buffer
if bytesCount > 0 { if !buffer.isEmpty {
await updateDownloadProgress(bytes: Int64(bytesCount)) try fileHandle.write(contentsOf: buffer)
downloadedBytes += Int64(buffer.count)
await updateFileProgress(fileName: file.name, bytes: Int64(buffer.count))
} }
// Progress already updated in the loop above
// Move to final destination // Move to final destination
if fileManager.fileExists(atPath: destination.path) { if fileManager.fileExists(atPath: destination.path) {
try fileManager.removeItem(at: destination) try fileManager.removeItem(at: destination)
@ -658,13 +713,42 @@ public actor ModelDownloadManager: ModelDownloadManagerProtocol {
private func markFileCompleted(task: DownloadTask) async { private func markFileCompleted(task: DownloadTask) async {
progress.completedFiles += 1 progress.completedFiles += 1
// Mark file as completed in progress tracking
if var fileProgress = progress.fileProgress[task.file.name] {
fileProgress.downloadedBytes = fileProgress.totalBytes
fileProgress.isCompleted = true
progress.fileProgress[task.file.name] = fileProgress
}
storage.saveFileStatus(task.file, at: task.destinationPath) storage.saveFileStatus(task.file, at: task.destinationPath)
ModelDownloadLogger.info("Completed: \(task.file.name) (\(progress.completedFiles)/\(progress.totalFiles))") ModelDownloadLogger.info("Completed: \(task.file.name) (\(progress.completedFiles)/\(progress.totalFiles))")
await updateProgress(progress.progress)
} }
private func updateDownloadProgress(bytes: Int64) async { private func updateFileProgress(fileName: String, bytes: Int64) async {
progress.downloadedBytes += bytes if var fileProgress = progress.fileProgress[fileName] {
await updateProgress(progress.progress) fileProgress.downloadedBytes = min(fileProgress.downloadedBytes + bytes, fileProgress.totalBytes)
progress.fileProgress[fileName] = fileProgress
let newProgress = progress.progress
let progressDiff = abs(newProgress - progress.lastReportedProgress)
if progressDiff >= 0.001 || newProgress >= 1.0 {
progress.lastReportedProgress = newProgress
await updateProgress(newProgress)
}
}
}
private func initializeFileProgress(fileName: String, totalBytes: Int64) async {
let fileProgress = FileDownloadProgress(
fileName: fileName,
totalBytes: totalBytes,
downloadedBytes: 0,
isCompleted: false
)
progress.fileProgress[fileName] = fileProgress
} }
private func updateProgress(_ value: Double) async { private func updateProgress(_ value: Double) async {

View File

@ -62,7 +62,8 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
// MARK: - Properties // MARK: - Properties
private let repoPath: String private let repoPath: String
private let session: URLSession private var session: URLSession
private let sessionConfig: URLSessionConfiguration
private let fileManager: FileManager private let fileManager: FileManager
private let storage: ModelDownloadStorage private let storage: ModelDownloadStorage
@ -72,6 +73,7 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
private var totalSize: Int64 = 0 private var totalSize: Int64 = 0
private var downloadedSize: Int64 = 0 private var downloadedSize: Int64 = 0
private var lastUpdatedBytes: Int64 = 0 private var lastUpdatedBytes: Int64 = 0
private var lastReportedProgress: Double = 0.0
// Download cancellation related properties // Download cancellation related properties
private var isCancelled: Bool = false private var isCancelled: Bool = false
@ -98,9 +100,17 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
self.repoPath = repoPath self.repoPath = repoPath
self.fileManager = .default self.fileManager = .default
self.storage = ModelDownloadStorage() self.storage = ModelDownloadStorage()
self.sessionConfig = config
self.session = URLSession(configuration: config) self.session = URLSession(configuration: config)
self.source = source self.source = source
ModelDownloadLogger.isEnabled = enableLogging ModelDownloadLogger.isEnabled = enableLogging
print("ModelScopeDownloadManager init")
}
deinit {
// Clean up session when the manager is deallocated
session.invalidateAndCancel()
print("ModelScopeDownloadManager deinit")
} }
// MARK: - Public Methods // MARK: - Public Methods
@ -135,6 +145,9 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
progress: ((Double) -> Void)? = nil progress: ((Double) -> Void)? = nil
) async throws { ) async throws {
// Ensure we have a valid session before starting download
ensureValidSession()
isCancelled = false isCancelled = false
ModelDownloadLogger.info("Starting download for modelId: \(modelId)") ModelDownloadLogger.info("Starting download for modelId: \(modelId)")
@ -158,7 +171,7 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
/// ///
/// This method gracefully stops all active downloads, closes file handles, /// This method gracefully stops all active downloads, closes file handles,
/// and preserves temporary files to enable resume functionality in future attempts. /// and preserves temporary files to enable resume functionality in future attempts.
/// The URLSession is invalidated to ensure clean cancellation. /// The URLSession is kept valid to allow future downloads.
public func cancelDownload() async { public func cancelDownload() async {
isCancelled = true isCancelled = true
@ -167,7 +180,8 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
await closeFileHandle() await closeFileHandle()
session.invalidateAndCancel() // Don't invalidate session to allow future downloads
// session.invalidateAndCancel()
ModelDownloadLogger.info("Download cancelled, temporary files preserved for resume") ModelDownloadLogger.info("Download cancelled, temporary files preserved for resume")
} }
@ -180,11 +194,25 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
/// - progress: Current progress value (0.0 to 1.0) /// - progress: Current progress value (0.0 to 1.0)
/// - callback: Progress callback function to invoke on main thread /// - callback: Progress callback function to invoke on main thread
private func updateProgress(_ progress: Double, callback: @escaping (Double) -> Void) { private func updateProgress(_ progress: Double, callback: @escaping (Double) -> Void) {
Task { @MainActor in // Only update UI progress if there's a significant change (>0.1%)
callback(progress) let progressDiff = abs(progress - lastReportedProgress)
if progressDiff >= 0.001 || progress >= 1.0 {
lastReportedProgress = progress
Task { @MainActor in
callback(progress)
}
} }
} }
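The 0.1% threshold above keeps the main-thread callback from firing on every buffered write. A standalone restatement of the rule as a tiny reporter type (a sketch, not project code):

```swift
// Sketch: only surface changes of at least 0.1%, plus the final 100% update.
struct ThrottledProgressReporter {
    private(set) var lastReported: Double = 0.0

    mutating func report(_ value: Double, to callback: (Double) -> Void) {
        if abs(value - lastReported) >= 0.001 || value >= 1.0 {
            lastReported = value
            callback(value)
        }
    }
}

var reporter = ThrottledProgressReporter()
reporter.report(0.0005) { print("UI update: \($0)") }   // suppressed, change too small
reporter.report(0.25)   { print("UI update: \($0)") }   // reported
reporter.report(1.0)    { print("UI update: \($0)") }   // final update always reported
```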
/// Ensures the URLSession is valid by recreating it if necessary
///
/// This method provides a simple and reliable way to ensure we have a valid URLSession
/// by always creating a fresh session before downloads. This prevents any potential
/// "Task created in a session that has been invalidated" errors.
private func ensureValidSession() {
session = URLSession(configuration: sessionConfig)
}
/// Fetches the complete file list from ModelScope or Modeler repository /// Fetches the complete file list from ModelScope or Modeler repository
/// ///
/// This method queries the repository API to discover all available files, /// This method queries the repository API to discover all available files,
@ -215,28 +243,28 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
} }
/** /// Downloads a single file with intelligent resume and retry mechanisms.
* Downloads a single file with intelligent resume and retry mechanisms ///
* /// This method handles individual file downloads with comprehensive error recovery,
* This method handles individual file downloads with comprehensive error recovery, /// resume functionality through temporary files, and progress tracking. It supports
* resume functionality through temporary files, and progress tracking. It supports /// both ModelScope and Modeler platforms with platform-specific URL construction.
* both ModelScope and Modeler platforms with platform-specific URL construction. ///
* /// - Features:
* Features: /// - Automatic resume from temporary files using HTTP Range requests
* - Automatic resume from temporary files using HTTP Range requests /// - Exponential backoff retry mechanism (configurable attempts)
* - Exponential backoff retry mechanism (configurable attempts) /// - Memory-efficient streaming using URLSession.bytes
* - Memory-efficient streaming using URLSession.bytes /// - File integrity validation using size verification
* - File integrity validation using size verification /// - Progress update throttling to prevent UI blocking
* - Progress update throttling to prevent UI blocking /// - Graceful cancellation with state preservation
* - Graceful cancellation with state preservation ///
* /// - Parameters:
* @param file ModelFile metadata including path, size, and download information /// - file: ModelFile metadata including path, size, and download information.
* @param destinationPath Target local path for the downloaded file /// - destinationPath: Target local path for the downloaded file.
* @param onProgress Progress callback receiving downloaded bytes count /// - onProgress: Progress callback receiving downloaded bytes count.
* @param maxRetries Maximum number of retry attempts (default: 3) /// - maxRetries: Maximum number of retry attempts. Defaults to 3.
* @param retryDelay Delay between retry attempts in seconds (default: 2.0) /// - retryDelay: Delay between retry attempts in seconds. Defaults to 2.0.
* @throws ModelScopeError if download fails after all retry attempts ///
*/ /// - Throws: `ModelScopeError` if download fails after all retry attempts.
private func downloadFile( private func downloadFile(
file: ModelFile, file: ModelFile,
destinationPath: String, destinationPath: String,
@ -293,9 +321,15 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
let destination = URL(fileURLWithPath: destinationPath) let destination = URL(fileURLWithPath: destinationPath)
.appendingPathComponent(file.name.sanitizedPath) .appendingPathComponent(file.name.sanitizedPath)
let modelHash = repoPath.hash let modelHash = repoPath.stableHash
let fileHash = file.path.hash let fileHash = file.path.stableHash
let tempURL = FileManager.default.temporaryDirectory
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let downloadsURL = documentsURL.appendingPathComponent(".downloads", isDirectory: true)
try? fileManager.createDirectory(at: downloadsURL, withIntermediateDirectories: true, attributes: nil)
let tempURL = downloadsURL
.appendingPathComponent("model_\(modelHash)_file_\(fileHash)_\(file.name.sanitizedPath).tmp") .appendingPathComponent("model_\(modelHash)_file_\(fileHash)_\(file.name.sanitizedPath).tmp")
var resumeOffset: Int64 = 0 var resumeOffset: Int64 = 0
@ -346,7 +380,8 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
} }
var downloadedBytes: Int64 = resumeOffset var downloadedBytes: Int64 = resumeOffset
var bytesCount = 0 var buffer = Data()
buffer.reserveCapacity(1024 * 1024) // Reserve 1MB buffer
for try await byte in asyncBytes { for try await byte in asyncBytes {
// Frequently check cancellation status // Frequently check cancellation status
@ -358,17 +393,24 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
return return
} }
try fileHandle.write(contentsOf: [byte]) buffer.append(byte)
downloadedBytes += 1
bytesCount += 1
// Reduce progress callback frequency: update every 64KB * 5 instead of every 1KB // Write in larger chunks to reduce I/O operations
if bytesCount >= 64 * 1024 * 5 { if buffer.count >= 256 * 1024 { // 256KB chunks
try fileHandle.write(contentsOf: buffer)
downloadedBytes += Int64(buffer.count)
onProgress(downloadedBytes) onProgress(downloadedBytes)
bytesCount = 0 buffer.removeAll(keepingCapacity: true)
} }
} }
// Write remaining buffer
if !buffer.isEmpty {
try fileHandle.write(contentsOf: buffer)
downloadedBytes += Int64(buffer.count)
onProgress(downloadedBytes)
}
try fileHandle.close() try fileHandle.close()
self.currentFileHandle = nil self.currentFileHandle = nil
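Both download managers replace per-byte `FileHandle` writes with a reusable `Data` buffer flushed in larger blocks. A self-contained sketch of that buffering idea over `URLSession.bytes` (the URL handling and flush size here are illustrative, not the project's exact code):

```swift
import Foundation

// Sketch: stream bytes to disk in 256 KB blocks instead of one write per byte.
func streamToFile(from url: URL, to fileURL: URL) async throws -> Int64 {
    FileManager.default.createFile(atPath: fileURL.path, contents: nil)
    let handle = try FileHandle(forWritingTo: fileURL)
    defer { try? handle.close() }

    let (bytes, _) = try await URLSession.shared.bytes(from: url)

    var buffer = Data()
    buffer.reserveCapacity(1024 * 1024)
    var written: Int64 = 0

    for try await byte in bytes {
        buffer.append(byte)
        if buffer.count >= 256 * 1024 {            // flush in 256 KB blocks
            try handle.write(contentsOf: buffer)
            written += Int64(buffer.count)
            buffer.removeAll(keepingCapacity: true)
        }
    }
    if !buffer.isEmpty {                            // flush the tail
        try handle.write(contentsOf: buffer)
        written += Int64(buffer.count)
    }
    return written
}
```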
@ -467,11 +509,13 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
ModelDownloadLogger.debug("Downloading: \(file.name)") ModelDownloadLogger.debug("Downloading: \(file.name)")
if !storage.isFileDownloaded(file, at: destinationPath) { if !storage.isFileDownloaded(file, at: destinationPath) {
let fileStartSize = downloadedSize
try await downloadFile( try await downloadFile(
file: file, file: file,
destinationPath: destinationPath, destinationPath: destinationPath,
onProgress: { downloadedBytes in onProgress: { downloadedBytes in
let currentProgress = Double(self.downloadedSize + downloadedBytes) / Double(self.totalSize) // Use the total size downloaded before this file started + this file's downloaded bytes
let currentProgress = Double(fileStartSize + downloadedBytes) / Double(self.totalSize)
self.updateProgress(currentProgress, callback: progress) self.updateProgress(currentProgress, callback: progress)
}, },
maxRetries: 500, // Can be made configurable maxRetries: 500, // Can be made configurable
@ -683,9 +727,12 @@ public actor ModelScopeDownloadManager: ModelDownloadManagerProtocol {
/// - destinationPath: Destination path used for temp file naming /// - destinationPath: Destination path used for temp file naming
/// - Returns: Size of temporary file in bytes, or 0 if file doesn't exist /// - Returns: Size of temporary file in bytes, or 0 if file doesn't exist
private func getTempFileSize(for file: ModelFile, destinationPath: String) -> Int64 { private func getTempFileSize(for file: ModelFile, destinationPath: String) -> Int64 {
let modelHash = repoPath.hash let modelHash = repoPath.stableHash
let fileHash = file.path.hash let fileHash = file.path.stableHash
let tempURL = FileManager.default.temporaryDirectory
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let downloadsURL = documentsURL.appendingPathComponent(".downloads", isDirectory: true)
let tempURL = downloadsURL
.appendingPathComponent("model_\(modelHash)_file_\(fileHash)_\(file.name.sanitizedPath).tmp") .appendingPathComponent("model_\(modelHash)_file_\(fileHash)_\(file.name.sanitizedPath).tmp")
guard fileManager.fileExists(atPath: tempURL.path) else { guard fileManager.fileExists(atPath: tempURL.path) else {

View File

@ -126,11 +126,14 @@ extension UIImage {
} }
UIGraphicsBeginImageContext(self.size) UIGraphicsBeginImageContext(self.size)
let context = UIGraphicsGetCurrentContext()! guard let context = UIGraphicsGetCurrentContext(), let cgImage = self.cgImage else {
UIGraphicsEndImageContext()
return self
}
context.translateBy(x: self.size.width / 2, y: self.size.height / 2) context.translateBy(x: self.size.width / 2, y: self.size.height / 2)
context.rotate(by: angle) context.rotate(by: angle)
context.scaleBy(x: 1.0, y: -1.0) context.scaleBy(x: 1.0, y: -1.0)
context.draw(self.cgImage!, in: CGRect(x: -self.size.width / 2, y: -self.size.height / 2, width: self.size.width, height: self.size.height)) context.draw(cgImage, in: CGRect(x: -self.size.width / 2, y: -self.size.height / 2, width: self.size.width, height: self.size.height))
let rotatedImage = UIGraphicsGetImageFromCurrentImageContext() let rotatedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext() UIGraphicsEndImageContext()

View File

@ -6,8 +6,23 @@
// //
import Foundation import Foundation
import CryptoKit
extension String { extension String {
/// Generate a stable hash value for temporary file naming
///
/// Unlike the hash property in Swift's standard library, this method
/// generates the same hash value for identical strings across different
/// app launches, ensuring that resumable download functionality works correctly.
///
/// - Returns: A stable hash value based on SHA256
var stableHash: String {
let data = self.data(using: .utf8) ?? Data()
let digest = SHA256.hash(data: data)
return digest.compactMap { String(format: "%02x", $0) }.joined().prefix(16).description
}
func removingTaobaoPrefix() -> String { func removingTaobaoPrefix() -> String {
return self.replacingOccurrences(of: "taobao-mnn/", with: "") return self.replacingOccurrences(of: "taobao-mnn/", with: "")
} }
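Swift's built-in `hash` is randomly seeded per process, so temp-file names derived from it change between launches and resume lookups miss; `stableHash` keeps them constant. A quick sketch (repo and file values are examples):

```swift
import Foundation

// Example: identical input always yields the identical hash across app launches.
let repoPath = "taobao-mnn/SomeModel"        // example repo id
let filePath = "model.mnn"                   // example file path

let tempName = "model_\(repoPath.stableHash)_file_\(filePath.stableHash)_model.mnn.tmp"
print(tempName)
// The name is the same on every run, so a previously written temp file is found
// again and the download can resume. `repoPath.hash` is seeded per process and
// would produce a different name after each launch.
```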

View File

@ -104,11 +104,9 @@ extension DraftMessage {
.asyncMap { (media : Media) -> (Media, URL?, URL?) in .asyncMap { (media : Media) -> (Media, URL?, URL?) in
(media, await media.getThumbnailURL(), await media.getURL()) (media, await media.getThumbnailURL(), await media.getURL())
} }
.filter { (media: Media, thumb: URL?, full: URL?) -> Bool in .compactMap { media, thumb, full in
thumb != nil && full != nil guard let thumb, let full else { return nil }
} return LLMChatImage(id: media.id.uuidString, thumbnail: thumb, full: full)
.map { media, thumb, full in
LLMChatImage(id: media.id.uuidString, thumbnail: thumb!, full: full!)
} }
} }
@ -118,11 +116,9 @@ extension DraftMessage {
.asyncMap { (media : Media) -> (Media, URL?, URL?) in .asyncMap { (media : Media) -> (Media, URL?, URL?) in
(media, await media.getThumbnailURL(), await media.getURL()) (media, await media.getThumbnailURL(), await media.getURL())
} }
.filter { (media: Media, thumb: URL?, full: URL?) -> Bool in .compactMap { media, thumb, full in
thumb != nil && full != nil guard let thumb, let full else { return nil }
} return LLMChatVideo(id: media.id.uuidString, thumbnail: thumb, full: full)
.map { media, thumb, full in
LLMChatVideo(id: media.id.uuidString, thumbnail: thumb!, full: full!)
} }
} }

View File

@ -63,15 +63,22 @@
2. Build MNN.framework: 2. Build MNN.framework:
```shell ```shell
cd MNN/ sh package_scripts/ios/buildiOS.sh "
sh package_scripts/ios/buildiOS.sh "-DMNN_ARM82=true -DMNN_LOW_MEMORY=true -DMNN_SUPPORT_TRANSFORMER_FUSE=true -DMNN_BUILD_LLM=true -DMNN_CPU_WEIGHT_DEQUANT_GEMM=true -DMNN_ARM82=ON
-DMNN_LOW_MEMORY=ON
-DMNN_SUPPORT_TRANSFORMER_FUSE=ON
-DMNN_BUILD_LLM=ON
-DMNN_CPU_WEIGHT_DEQUANT_GEMM=ON
-DMNN_METAL=ON -DMNN_METAL=ON
-DMNN_BUILD_DIFFUSION=ON -DMNN_BUILD_DIFFUSION=ON
-DMNN_BUILD_OPENCV=ON
-DMNN_IMGCODECS=ON
-DMNN_OPENCL=OFF -DMNN_OPENCL=OFF
-DMNN_SEP_BUILD=OFF -DMNN_SEP_BUILD=OFF
-DMNN_SUPPORT_TRANSFORMER_FUSE=ON" -DLLM_SUPPORT_AUDIO=ON
-DMNN_BUILD_AUDIO=ON
-DLLM_SUPPORT_VISION=ON
-DMNN_BUILD_OPENCV=ON
-DMNN_IMGCODECS=ON
"
``` ```
3. Copy the framework into the iOS project 3. Copy the framework into the iOS project