mirror of https://github.com/chaitin/PandaWiki.git
Compare commits: 5ba524b19f ... e4da972c68 (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | e4da972c68 | |
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"io"
+	"math"
 	"slices"
 	"strings"
 	"time"
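The only change in this hunk is the new "math" import; the diff does not show where it is used. Ceiling arithmetic is a common ingredient of token-budget splitting such as the SplitByTokenLimit helper that appears later in this diff, so here is a hypothetical, self-contained sketch of that idea. The 4-characters-per-token estimate, the helper names, and the paragraph-based packing are all assumptions for illustration, not the repository's actual implementation.

```go
package main

import (
	"fmt"
	"math"
	"strings"
)

// estimateTokens is a naive stand-in for a real tokenizer,
// assuming roughly 4 characters per token (illustration only).
func estimateTokens(s string) int {
	return int(math.Ceil(float64(len(s)) / 4.0))
}

// splitByTokenLimit packs paragraphs into chunks whose estimated token
// count stays within maxTokens. A single paragraph that alone exceeds
// the budget is kept whole rather than split mid-paragraph.
func splitByTokenLimit(text string, maxTokens int) ([]string, error) {
	if maxTokens <= 0 {
		return nil, fmt.Errorf("maxTokens must be greater than 0")
	}
	var chunks []string
	var current []string
	tokens := 0
	for _, p := range strings.Split(text, "\n\n") {
		pt := estimateTokens(p)
		// Start a new chunk when adding this paragraph would blow the budget.
		if len(current) > 0 && tokens+pt > maxTokens {
			chunks = append(chunks, strings.Join(current, "\n\n"))
			current, tokens = nil, 0
		}
		current = append(current, p)
		tokens += pt
	}
	if len(current) > 0 {
		chunks = append(chunks, strings.Join(current, "\n\n"))
	}
	return chunks, nil
}

func main() {
	text := strings.Repeat("Some paragraph about the document.\n\n", 10)
	chunks, err := splitByTokenLimit(strings.TrimSpace(text), 20)
	fmt.Println(len(chunks), err) // several chunks, each within the 20-token budget
}
```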
@@ -226,8 +227,7 @@ func (u *LLMUsecase) SummaryNode(ctx context.Context, model *domain.Model, name,
 		return "", fmt.Errorf("failed to generate summary for document %s", name)
 	}
 
-	joined := strings.Join(summaries, "\n\n")
-	return u.requestSummary(ctx, chatModel, name, joined)
+	return u.reduceSummaries(ctx, chatModel, name, summaries)
 }
 
 func (u *LLMUsecase) trimThinking(summary string) string {
@@ -258,6 +258,39 @@ func (u *LLMUsecase) requestSummary(ctx context.Context, chatModel model.BaseCha
 	return strings.TrimSpace(u.trimThinking(summary)), nil
 }
 
+func (u *LLMUsecase) reduceSummaries(ctx context.Context, chatModel model.BaseChatModel, name string, summaries []string) (string, error) {
+	current := summaries
+	for len(current) > 0 {
+		joined := strings.Join(current, "\n\n")
+		chunks, err := u.SplitByTokenLimit(joined, summaryChunkTokenLimit)
+		if err != nil {
+			return "", err
+		}
+		if len(chunks) == 1 {
+			return u.requestSummary(ctx, chatModel, name, chunks[0])
+		}
+
+		next := make([]string, 0, len(chunks))
+		for idx, chunk := range chunks {
+			summary, err := u.requestSummary(ctx, chatModel, name, chunk)
+			if err != nil {
+				u.logger.Error("Failed to reduce summary chunk", log.Int("reduce_chunk_index", idx), log.Error(err))
+				continue
+			}
+			if summary == "" {
+				u.logger.Warn("Empty summary returned while reducing", log.Int("reduce_chunk_index", idx))
+				continue
+			}
+			next = append(next, summary)
+		}
+		if len(next) == 0 {
+			break
+		}
+		current = next
+	}
+	return "", fmt.Errorf("failed to consolidate summary for document %s", name)
+}
+
 func (u *LLMUsecase) SplitByTokenLimit(text string, maxTokens int) ([]string, error) {
 	if maxTokens <= 0 {
 		return nil, fmt.Errorf("maxTokens must be greater than 0")
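The new reduceSummaries function is a hierarchical, map-reduce style consolidation: it joins the current summaries, re-splits the joined text by token budget, summarizes each chunk, and repeats until everything fits into a single request; if every per-chunk request fails or returns empty text, the loop breaks and the caller gets the consolidation error. Below is a minimal, self-contained sketch of the same loop shape, with a stub summarizer and a character-based splitter standing in for requestSummary and SplitByTokenLimit (those stubs and their limits are assumptions, not the repository's code).

```go
package main

import (
	"fmt"
	"strings"
)

// summarize stands in for the LLM call (requestSummary in the diff);
// truncating keeps the loop's shrinking behaviour visible.
func summarize(chunk string) (string, error) {
	if len(chunk) > 80 {
		return chunk[:80], nil
	}
	return chunk, nil
}

// splitByLimit stands in for SplitByTokenLimit: it greedily cuts the
// text into pieces of at most maxLen characters.
func splitByLimit(text string, maxLen int) []string {
	var chunks []string
	for len(text) > maxLen {
		chunks = append(chunks, text[:maxLen])
		text = text[maxLen:]
	}
	if text != "" {
		chunks = append(chunks, text)
	}
	return chunks
}

// reduce mirrors the shape of reduceSummaries: join, re-split, summarize
// each chunk, and repeat until the joined text fits in a single chunk.
func reduce(summaries []string, maxLen int) (string, error) {
	current := summaries
	for len(current) > 0 {
		joined := strings.Join(current, "\n\n")
		chunks := splitByLimit(joined, maxLen)
		if len(chunks) == 1 {
			// Small enough for one final summarization request.
			return summarize(chunks[0])
		}
		next := make([]string, 0, len(chunks))
		for _, chunk := range chunks {
			s, err := summarize(chunk)
			if err != nil || s == "" {
				continue // the real code logs and skips failed chunks
			}
			next = append(next, s)
		}
		if len(next) == 0 {
			break
		}
		current = next
	}
	return "", fmt.Errorf("failed to consolidate summary")
}

func main() {
	out, err := reduce([]string{strings.Repeat("a", 300), strings.Repeat("b", 300)}, 200)
	fmt.Println(len(out), err) // a single summary no longer than 80 characters
}
```

Each pass can only shrink the material, so the sketch (like the diff) terminates either with one final summary or with the error once no usable chunk summaries remain.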
@@ -707,7 +707,6 @@ const AiQaContent: React.FC<{
           alt='logo'
           width={46}
           height={46}
-          unoptimized
           style={{
             objectFit: 'contain',
           }}
@@ -209,7 +209,6 @@ const SearchDocContent: React.FC<SearchDocContentProps> = ({
           alt='logo'
           width={46}
           height={46}
-          unoptimized
           style={{
             objectFit: 'contain',
           }}