2025-02-10 19:39:48 +08:00
|
|
|
//
|
|
|
|
// LLMChatViewModel.swift
|
|
|
|
// MNNLLMiOS
|
|
|
|
// Created by 游薪渝(揽清) on 2025/1/8.
|
|
|
|
//
|
|
|
|
|
|
|
|
import Combine
|
|
|
|
import SwiftUI
|
|
|
|
import AVFoundation
|
|
|
|
|
|
|
|
import ExyteChat
|
|
|
|
|
|
|
|
/// View model driving the LLM chat screen: owns the inference engine
/// (text LLM or diffusion session), publishes chat state to SwiftUI,
/// and bridges user drafts to the underlying model.
final class LLMChatViewModel: ObservableObject {

    // Text-generation engine; nil until setupLLM(modelPath:) creates it.
    private var llm: LLMInferenceEngineWrapper?

    // Image-generation engine, used instead of `llm` for diffusion models.
    private var diffusion: DiffusionSession?

    // Serializes inference processing state off the main actor.
    private let llmState = LLMState()

    // Messages rendered by the chat UI; mirrored from the interactor in onStart().
    @Published var messages: [Message] = []

    @Published var isModelLoaded = false

    @Published var isProcessing: Bool = false

    // Id of the assistant message currently receiving streamed tokens, if any.
    @Published var currentStreamingMessageId: String? = nil

    // Whether the model uses memory-mapped weights; read from the model config in init.
    @Published var useMmap: Bool = false

    // MARK: - Think Mode Properties

    @Published var isThinkingModeEnabled: Bool = true

    // Set in init via ModelUtils; gates the think-mode toggle.
    @Published var supportsThinkingMode: Bool = false
|
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Disables the chat input while the model is loading or a response is in flight.
/// (Name typo is preserved — it is part of the public interface.)
var chatInputUnavilable: Bool {
    !isModelLoaded || isProcessing
}
|
|
|
|
|
|
|
|
/// Human-readable status line for the chat header.
var chatStatus: String {
    guard isModelLoaded else { return "Model Loading..." }
    return isProcessing ? "Processing..." : "Ready"
}
|
|
|
|
|
|
|
|
/// Avatar URL shown as the chat cover when there is exactly one non-user sender.
var chatCover: URL? {
    guard interactor.otherSenders.count == 1 else { return nil }
    return interactor.otherSenders.first?.avatar
}
|
2025-09-05 15:22:05 +08:00
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
    // Bridges draft messages between this view model and the chat UI layer.
    private let interactor: LLMChatInteractor

    // Combine subscriptions; cleared in onStop() to tear down the message pipeline.
    private var subscriptions = Set<AnyCancellable>()

    var modelInfo: ModelInfo

    // Previous chat session being resumed, if any.
    var history: ChatHistory?

    // Stable id used when persisting this chat; reuses the history id when resuming.
    private var historyId: String

    // Reads/writes per-model settings (mmap flag, iterations, seed, JSON config).
    let modelConfigManager: ModelConfigManager
|
2025-03-11 16:45:31 +08:00
|
|
|
|
|
|
|
/// True when the loaded model is a diffusion (image-generation) model,
/// detected by a case-insensitive substring match on the model name.
var isDiffusionModel: Bool {
    let normalizedName = modelInfo.modelName.lowercased()
    return normalizedName.contains("diffusion")
}
|
2025-02-17 17:33:03 +08:00
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Creates a view model for `modelInfo`, optionally resuming a saved chat.
/// - Parameters:
///   - modelInfo: The model to load and chat with.
///   - history: A previously saved chat to restore; nil starts a fresh session.
init(modelInfo: ModelInfo, history: ChatHistory? = nil) {
    print("yxy:: LLMChat View Model init")
    self.modelInfo = modelInfo
    self.history = history
    // Reuse the saved history id so re-saving overwrites the same record.
    self.historyId = history?.id ?? UUID().uuidString
    let messages = self.history?.messages
    self.interactor = LLMChatInteractor(modelInfo: modelInfo, historyMessages: messages)

    self.modelConfigManager = ModelConfigManager(modelPath: modelInfo.localPath)

    self.useMmap = self.modelConfigManager.readUseMmap()

    // Check if model supports thinking mode
    self.supportsThinkingMode = ModelUtils.isSupportThinkingSwitch(modelInfo.tags, modelName: modelInfo.modelName)
}
|
|
|
|
|
|
|
|
deinit {
    print("yxy:: LLMChat View Model deinit")

    // Stop any in-flight generation before releasing the engines.
    llm?.cancelInference()
    llm = nil
    diffusion = nil
    print("yxy:: LLMChat View Model cleanup complete")
}
|
|
|
|
|
2025-08-13 17:41:24 +08:00
|
|
|
// MARK: - Think Mode Methods

/// Toggle thinking mode on/off
///
/// No-op when the loaded model does not expose a thinking switch; otherwise
/// flips the flag and pushes the new setting into the engine and interactor.
func toggleThinkingMode() {
    guard supportsThinkingMode else { return }

    isThinkingModeEnabled.toggle()
    configureThinkingMode()

    print("Think mode toggled to: \(isThinkingModeEnabled)")
}
|
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Loads the inference engine for the model at `modelPath`.
///
/// Posts a localized "loading" system message immediately, then initializes
/// either a diffusion session or a text LLM engine depending on the model
/// type. Completion callbacks hop to the main actor before touching
/// published state.
/// - Parameter modelPath: Local filesystem path of the model to load.
func setupLLM(modelPath: String) {
    Task { @MainActor in
        self.isModelLoaded = false
        self.send(draft: DraftMessage(
            text: NSLocalizedString("ModelLoadingText", comment: ""),
            thinkText: "",
            medias: [],
            recording: nil,
            replyMessage: nil,
            createdAt: Date()
        ), userType: .system)
    }

    // Use the shared `isDiffusionModel` property instead of re-deriving the
    // model type from the name string here (keeps the check in one place).
    if isDiffusionModel {
        diffusion = DiffusionSession(modelPath: modelPath, completion: { [weak self] success in
            Task { @MainActor in
                print("Diffusion Model \(success)")
                self?.sendModelLoadStatus(success: success)
                self?.isModelLoaded = success
            }
        })
    } else {
        llm = LLMInferenceEngineWrapper(modelPath: modelPath) { [weak self] success in
            Task { @MainActor in
                self?.sendModelLoadStatus(success: success)
                // Replay any resumed conversation into the engine as context.
                self?.processHistoryMessages()
                self?.isModelLoaded = success

                // Configure thinking mode after model is loaded
                if success {
                    self?.configureThinkingMode()
                }
            }
        }
    }
}
|
|
|
|
|
2025-08-13 17:41:24 +08:00
|
|
|
/// Configure thinking mode after model loading.
///
/// Pushes the current `isThinkingModeEnabled` flag into the engine and the
/// interactor. No-ops until the LLM engine exists and the model supports a
/// thinking switch. (The previous inner `if supportsThinkingMode` was dead
/// code — the guard already establishes it.)
private func configureThinkingMode() {
    guard let llm = llm, supportsThinkingMode else { return }

    llm.setThinkingModeEnabled(isThinkingModeEnabled)

    // Keep the interactor in sync so streamed output is handled consistently.
    interactor.isThinkingModeEnabled = isThinkingModeEnabled

    print("Thinking mode configured: \(isThinkingModeEnabled)")
}
|
|
|
|
|
2025-03-10 17:10:40 +08:00
|
|
|
/// Posts a localized system message reporting whether model loading succeeded.
/// - Parameter success: Outcome of the engine's load completion callback.
private func sendModelLoadStatus(success: Bool) {
    let modelLoadSuccessText = NSLocalizedString("ModelLoadingSuccessText", comment: "")
    let modelLoadFailText = NSLocalizedString("ModelLoadingFailText", comment: "")

    let statusDraft = DraftMessage(
        text: success ? modelLoadSuccessText : modelLoadFailText,
        thinkText: "",
        medias: [],
        recording: nil,
        replyMessage: nil,
        createdAt: Date()
    )
    send(draft: statusDraft, userType: .system)
}
|
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Replays a resumed chat's messages into the LLM engine as context prompts.
///
/// Each history message becomes one or more role-keyed dictionaries: the text
/// itself, plus `<img>`/`<audio>`-tagged entries for any attachments.
private func processHistoryMessages() {
    guard let history = self.history else { return }

    var prompts: [[String: String]] = []
    for message in history.messages {
        let role = message.isUser ? "user" : "assistant"

        prompts.append([role: message.content])

        if let images = message.images {
            let taggedImages = images.map { "<img>\($0.full.path)</img>" }.joined()
            prompts.append([role: taggedImages])
        }

        if let audio = message.audio, let url = audio.url {
            prompts.append([role: "<audio>\(url.path)</audio>"])
        }
    }

    llm?.addPrompts(from: prompts as [[AnyHashable: Any]])
}
|
|
|
|
|
|
|
|
/// Entry point for a user-submitted draft: echoes it into the chat, records
/// model usage, and dispatches to the appropriate engine once loaded.
/// - Parameter draft: The user's composed message (text plus attachments).
func sendToLLM(draft: DraftMessage) {
    NotificationCenter.default.post(name: .dismissKeyboard, object: nil)

    self.send(draft: draft, userType: .user)

    recordModelUsage()

    // Drafts sent before the model finishes loading are shown but not answered.
    guard isModelLoaded else { return }

    // Use the shared `isDiffusionModel` property instead of re-deriving the
    // model type from the name string (consistency with the rest of the file).
    if isDiffusionModel {
        self.getDiffusionResponse(draft: draft)
    } else {
        self.getLLMRespsonse(draft: draft)
    }
}
|
|
|
|
|
|
|
|
func send(draft: DraftMessage, userType: UserType) {
|
|
|
|
interactor.send(draftMessage: draft, userType: userType)
|
|
|
|
}
|
|
|
|
|
2025-03-10 17:10:40 +08:00
|
|
|
/// Generates an image for `draft.text` with the diffusion engine, reporting
/// progress into the chat as system messages and posting the final image.
/// - Parameter draft: The user's prompt; only its text is used here.
func getDiffusionResponse(draft: DraftMessage) {

    Task {

        let tempImagePath = FileOperationManager.shared.generateTempImagePath().path

        // Last progress value reported; used to throttle updates to >20% steps.
        var lastProcess:Int32 = 0

        self.send(draft: DraftMessage(text: "Start Generating Image...", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .assistant)

        // Read the user-configured iteration count and seed value.
        let userIterations = self.modelConfigManager.readIterations()

        let userSeed = self.modelConfigManager.readSeed()

        diffusion?.run(withPrompt: draft.text,
                       imagePath: tempImagePath,
                       iterations: Int32(userIterations),
                       seed: Int32(userSeed),
                       progressCallback: { [weak self] progress in
            guard let self = self else { return }
            if progress == 100 {
                // Generation finished: announce success and attach the image.
                self.send(draft: DraftMessage(text: "Image generated successfully!", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .system)
                self.interactor.sendImage(imageURL: URL(fileURLWithPath: tempImagePath))
            } else if ((progress - lastProcess) > 20) {
                // Only post an update every 20+ percentage points to limit chat spam.
                lastProcess = progress
                self.send(draft: DraftMessage(text: "Generating Image \(progress)%", thinkText: "", medias: [], recording: nil, replyMessage: nil, createdAt: Date()), userType: .system)
            }
        })
    }
}
|
2025-02-10 19:39:48 +08:00
|
|
|
|
|
|
|
/// Streams a text response for `draft` from the LLM engine.
///
/// Flow: mark processing, insert an empty assistant message to stream into,
/// fold image/audio attachments into tagged prompt text, then feed token
/// output to the UI via UIUpdateOptimizer until the "<eop>" sentinel arrives.
/// (Method name typo is preserved — it is called elsewhere in this file.)
/// - Parameter draft: The user's message, including any media attachments.
func getLLMRespsonse(draft: DraftMessage) {
    Task {
        await llmState.setProcessing(true)
        await MainActor.run {
            self.isProcessing = true
            // Placeholder assistant message that streamed tokens will fill in.
            let emptyMessage = DraftMessage(
                text: "",
                thinkText: "",
                medias: [],
                recording: nil,
                replyMessage: nil,
                createdAt: Date()
            )
            self.send(draft: emptyMessage, userType: .assistant)
            if let lastMessage = self.messages.last {
                self.currentStreamingMessageId = lastMessage.id
            }
        }

        var content = draft.text
        let medias = draft.medias

        // MARK: Add image
        for media in medias {
            guard media.type == .image, let url = await media.getURL() else {
                continue
            }

            let fileName = url.lastPathComponent

            // Prepend the processed image as an <img> tag so it precedes the text.
            if let processedUrl = FileOperationManager.shared.processImageFile(from: url, fileName: fileName) {
                content = "<img>\(processedUrl.path)</img>" + content
            }
        }

        // Audio recordings are passed through as-is (AAC→WAV conversion disabled).
        if let audio = draft.recording, let path = audio.url {
            // if let wavFile = await convertACCToWAV(accFileUrl: path) {
            content = "<audio>\(path.path)</audio>" + content
            // }
        }

        // DeepSeek models need the full multi-turn transcript re-encoded.
        let convertedContent = self.convertDeepSeekMutliChat(content: content)

        await llmState.processContent(convertedContent, llm: self.llm, showPerformance: true) { [weak self] output in
            guard let self = self else { return }

            // "<eop>" marks end of the stream: flush buffered text, then reset UI state.
            if output.contains("<eop>") {

                Task {
                    await UIUpdateOptimizer.shared.forceFlush { [weak self] finalOutput in
                        guard let self = self else { return }
                        if !finalOutput.isEmpty {
                            self.send(draft: DraftMessage(
                                text: finalOutput,
                                thinkText: "",
                                medias: [],
                                recording: nil,
                                replyMessage: nil,
                                createdAt: Date()
                            ), userType: .assistant)
                        }
                    }

                    await MainActor.run {
                        self.isProcessing = false
                        self.currentStreamingMessageId = nil

                        // Slight delay so the keyboard dismiss lands after the final UI update.
                        DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
                            NotificationCenter.default.post(name: .dismissKeyboard, object: nil)
                        }
                    }
                    await self.llmState.setProcessing(false)
                }
                return
            }

            // Normal token chunk: batch through the optimizer to limit UI churn.
            Task {
                await UIUpdateOptimizer.shared.addUpdate(output) { [weak self] output in
                    guard let self = self else { return }
                    self.send(draft: DraftMessage(
                        text: output,
                        thinkText: "",
                        medias: [],
                        recording: nil,
                        replyMessage: nil,
                        createdAt: Date()
                    ), userType: .assistant)
                }
            }
        }
    }
}
|
|
|
|
|
2025-03-19 17:03:51 +08:00
|
|
|
/// Applies the persisted JSON configuration to the loaded LLM engine.
/// Does nothing if no config exists or the engine is not yet loaded.
func setModelConfig() {
    guard let configStr = self.modelConfigManager.readConfigAsJSONString(),
          let llm = self.llm else { return }
    llm.setConfigWithJSONString(configStr)
}
|
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Re-encodes the whole conversation for DeepSeek models.
///
/// Format: <|begin_of_sentence|><|User|>{text}<|Assistant|>{text}<|end_of_sentence|>
/// Non-DeepSeek models receive `content` unchanged. (Method name typo is
/// preserved — callers in this file use it.)
private func convertDeepSeekMutliChat(content: String) -> String {
    guard self.modelInfo.modelName.lowercased().contains("deepseek") else {
        return content
    }

    var transcript = "<|begin_of_sentence|>"

    for message in messages {
        // User ids "1"/"2" map to the user/assistant roles; skip anything else
        // (e.g. system messages) so they don't pollute the transcript.
        let roleTag: String
        switch message.user.id {
        case "1":
            roleTag = "<|User|>"
        case "2":
            roleTag = "<|Assistant|>"
        default:
            continue
        }
        transcript += "\(roleTag)\(message.text)"
    }

    transcript += "<|end_of_sentence|><think><\n>"
    print(transcript)
    return transcript
}
|
|
|
|
|
2025-07-10 14:45:59 +08:00
|
|
|
// MARK: - Public Methods for File Operations

/// Cleans the model temporary folder using FileOperationManager.
func cleanModelTmpFolder() {
    FileOperationManager.shared.cleanModelTempFolder(modelPath: modelInfo.localPath)
}
|
|
|
|
|
|
|
|
/// Wires the interactor's message stream into `messages`, connects the
/// interactor, and kicks off model loading. Call when the chat view appears.
func onStart() {
    interactor.messages
        .map { messages in
            messages.map { $0.toChatMessage() }
        }
        // [weak self] breaks a retain cycle: self stores the subscription in
        // `subscriptions`, and the sink closure previously captured self
        // strongly, keeping the view model alive until onStop() ran.
        .sink { [weak self] messages in
            self?.messages = messages
        }
        .store(in: &subscriptions)

    interactor.connect()

    self.setupLLM(modelPath: self.modelInfo.localPath)

    recordModelUsage()
}
|
2025-09-05 15:22:05 +08:00
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Tears the session down when the chat view disappears: persists the chat,
/// cancels subscriptions and inference, and cleans temporary files.
func onStop() {
    recordModelUsage()

    // Persist under the same historyId so resumed chats overwrite their record.
    ChatHistoryManager.shared.saveChat(
        historyId: historyId,
        modelInfo: modelInfo,
        messages: messages
    )

    subscriptions.removeAll()

    interactor.disconnect()

    // Stop any in-flight generation before releasing the engine.
    llm?.cancelInference()

    llm = nil

    FileOperationManager.shared.cleanTempDirectories()
    // NOTE(review): the model temp folder is kept when mmap is enabled —
    // presumably it backs the memory-mapped weight cache; confirm before changing.
    if !useMmap {
        FileOperationManager.shared.cleanModelTempFolder(modelPath: modelInfo.localPath)
    }
}
|
2025-09-05 15:22:05 +08:00
|
|
|
|
2025-02-10 19:39:48 +08:00
|
|
|
/// Requests the next page of chat history from the interactor.
/// The `message` anchor is currently unused by the paging call.
func loadMoreMessage(before message: Message) {
    let pageLoad = interactor.loadNextPage()
    pageLoad
        .sink { _ in }
        .store(in: &subscriptions)
}
|
2025-09-03 19:25:49 +08:00
|
|
|
|
|
|
|
/// Marks the model as recently used and broadcasts the change so other
/// screens (e.g. the model list) can refresh their ordering.
private func recordModelUsage() {
    let name = modelInfo.modelName
    ModelStorageManager.shared.updateLastUsed(for: name)

    NotificationCenter.default.post(
        name: .modelUsageUpdated,
        object: nil,
        userInfo: ["modelName": name]
    )
}
|
2025-02-10 19:39:48 +08:00
|
|
|
}
|