2024-10-28 11:34:41 -05:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"context"
|
2024-11-12 22:47:25 -06:00
|
|
|
"encoding/base64"
|
2024-10-28 11:34:41 -05:00
|
|
|
"fmt"
|
2025-01-10 10:03:53 -06:00
|
|
|
"image"
|
2025-01-13 08:59:29 -06:00
|
|
|
"slices"
|
2024-10-28 11:34:41 -05:00
|
|
|
"strings"
|
|
|
|
"sync"
|
|
|
|
|
2025-01-10 10:03:53 -06:00
|
|
|
_ "image/jpeg"
|
|
|
|
|
2025-01-13 01:31:51 -06:00
|
|
|
"github.com/sirupsen/logrus"
|
2024-10-28 11:34:41 -05:00
|
|
|
"github.com/tmc/langchaingo/llms"
|
|
|
|
)
|
|
|
|
|
2025-01-13 08:59:29 -06:00
|
|
|
// getSuggestedCorrespondent generates a suggested correspondent for a document using the LLM
|
|
|
|
func (app *App) getSuggestedCorrespondent(ctx context.Context, content string, suggestedTitle string, availableCorrespondents []string, correspondentBlackList []string) (string, error) {
|
|
|
|
likelyLanguage := getLikelyLanguage()
|
|
|
|
|
|
|
|
templateMutex.RLock()
|
|
|
|
defer templateMutex.RUnlock()
|
|
|
|
|
2025-02-02 09:14:18 -06:00
|
|
|
// Get available tokens for content
|
|
|
|
templateData := map[string]interface{}{
|
2025-01-13 08:59:29 -06:00
|
|
|
"Language": likelyLanguage,
|
|
|
|
"AvailableCorrespondents": availableCorrespondents,
|
|
|
|
"BlackList": correspondentBlackList,
|
|
|
|
"Title": suggestedTitle,
|
2025-02-02 09:14:18 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
availableTokens, err := getAvailableTokensForContent(correspondentTemplate, templateData)
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error calculating available tokens: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Truncate content if needed
|
|
|
|
truncatedContent, err := truncateContentByTokens(content, availableTokens)
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error truncating content: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Execute template with truncated content
|
|
|
|
var promptBuffer bytes.Buffer
|
|
|
|
templateData["Content"] = truncatedContent
|
|
|
|
err = correspondentTemplate.Execute(&promptBuffer, templateData)
|
2025-01-13 08:59:29 -06:00
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error executing correspondent template: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
prompt := promptBuffer.String()
|
|
|
|
log.Debugf("Correspondent suggestion prompt: %s", prompt)
|
|
|
|
|
|
|
|
completion, err := app.LLM.GenerateContent(ctx, []llms.MessageContent{
|
|
|
|
{
|
|
|
|
Parts: []llms.ContentPart{
|
|
|
|
llms.TextContent{
|
|
|
|
Text: prompt,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Role: llms.ChatMessageTypeHuman,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error getting response from LLM: %v", err)
|
|
|
|
}
|
|
|
|
|
2025-02-05 13:59:08 -06:00
|
|
|
response := stripReasoning(strings.TrimSpace(completion.Choices[0].Content))
|
2025-01-13 08:59:29 -06:00
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
// getSuggestedTags generates suggested tags for a document using the LLM
|
2025-01-13 01:31:51 -06:00
|
|
|
func (app *App) getSuggestedTags(
|
|
|
|
ctx context.Context,
|
|
|
|
content string,
|
|
|
|
suggestedTitle string,
|
|
|
|
availableTags []string,
|
2025-01-13 08:59:29 -06:00
|
|
|
originalTags []string,
|
2025-01-13 01:31:51 -06:00
|
|
|
logger *logrus.Entry) ([]string, error) {
|
2024-10-28 11:34:41 -05:00
|
|
|
likelyLanguage := getLikelyLanguage()
|
|
|
|
|
|
|
|
templateMutex.RLock()
|
|
|
|
defer templateMutex.RUnlock()
|
|
|
|
|
2025-01-13 10:22:54 -06:00
|
|
|
// Remove all paperless-gpt related tags from available tags
|
|
|
|
availableTags = removeTagFromList(availableTags, manualTag)
|
|
|
|
availableTags = removeTagFromList(availableTags, autoTag)
|
|
|
|
availableTags = removeTagFromList(availableTags, autoOcrTag)
|
|
|
|
|
2025-02-02 09:14:18 -06:00
|
|
|
// Get available tokens for content
|
|
|
|
templateData := map[string]interface{}{
|
2024-10-28 11:34:41 -05:00
|
|
|
"Language": likelyLanguage,
|
|
|
|
"AvailableTags": availableTags,
|
2025-01-13 08:59:29 -06:00
|
|
|
"OriginalTags": originalTags,
|
2024-10-28 11:34:41 -05:00
|
|
|
"Title": suggestedTitle,
|
2025-02-02 09:14:18 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
availableTokens, err := getAvailableTokensForContent(tagTemplate, templateData)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("Error calculating available tokens: %v", err)
|
|
|
|
return nil, fmt.Errorf("error calculating available tokens: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Truncate content if needed
|
|
|
|
truncatedContent, err := truncateContentByTokens(content, availableTokens)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("Error truncating content: %v", err)
|
|
|
|
return nil, fmt.Errorf("error truncating content: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Execute template with truncated content
|
|
|
|
var promptBuffer bytes.Buffer
|
|
|
|
templateData["Content"] = truncatedContent
|
|
|
|
err = tagTemplate.Execute(&promptBuffer, templateData)
|
2024-10-28 11:34:41 -05:00
|
|
|
if err != nil {
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Errorf("Error executing tag template: %v", err)
|
2024-10-28 11:34:41 -05:00
|
|
|
return nil, fmt.Errorf("error executing tag template: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
prompt := promptBuffer.String()
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Debugf("Tag suggestion prompt: %s", prompt)
|
2024-10-28 11:34:41 -05:00
|
|
|
|
|
|
|
completion, err := app.LLM.GenerateContent(ctx, []llms.MessageContent{
|
|
|
|
{
|
|
|
|
Parts: []llms.ContentPart{
|
|
|
|
llms.TextContent{
|
|
|
|
Text: prompt,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Role: llms.ChatMessageTypeHuman,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Errorf("Error getting response from LLM: %v", err)
|
2024-10-28 11:34:41 -05:00
|
|
|
return nil, fmt.Errorf("error getting response from LLM: %v", err)
|
|
|
|
}
|
|
|
|
|
2025-02-05 13:59:08 -06:00
|
|
|
response := stripReasoning(completion.Choices[0].Content)
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
suggestedTags := strings.Split(response, ",")
|
|
|
|
for i, tag := range suggestedTags {
|
|
|
|
suggestedTags[i] = strings.TrimSpace(tag)
|
|
|
|
}
|
|
|
|
|
2025-01-13 08:59:29 -06:00
|
|
|
// append the original tags to the suggested tags
|
|
|
|
suggestedTags = append(suggestedTags, originalTags...)
|
|
|
|
// Remove duplicates
|
|
|
|
slices.Sort(suggestedTags)
|
|
|
|
suggestedTags = slices.Compact(suggestedTags)
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
// Filter out tags that are not in the available tags list
|
|
|
|
filteredTags := []string{}
|
|
|
|
for _, tag := range suggestedTags {
|
|
|
|
for _, availableTag := range availableTags {
|
|
|
|
if strings.EqualFold(tag, availableTag) {
|
|
|
|
filteredTags = append(filteredTags, availableTag)
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return filteredTags, nil
|
|
|
|
}
|
|
|
|
|
2025-01-13 01:31:51 -06:00
|
|
|
func (app *App) doOCRViaLLM(ctx context.Context, jpegBytes []byte, logger *logrus.Entry) (string, error) {
|
2024-10-28 11:34:41 -05:00
|
|
|
templateMutex.RLock()
|
|
|
|
defer templateMutex.RUnlock()
|
|
|
|
likelyLanguage := getLikelyLanguage()
|
|
|
|
|
|
|
|
var promptBuffer bytes.Buffer
|
|
|
|
err := ocrTemplate.Execute(&promptBuffer, map[string]interface{}{
|
|
|
|
"Language": likelyLanguage,
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error executing tag template: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
prompt := promptBuffer.String()
|
|
|
|
|
2025-01-10 10:03:53 -06:00
|
|
|
// Log the image dimensions
|
|
|
|
img, _, err := image.Decode(bytes.NewReader(jpegBytes))
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error decoding image: %v", err)
|
|
|
|
}
|
|
|
|
bounds := img.Bounds()
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Debugf("Image dimensions: %dx%d", bounds.Dx(), bounds.Dy())
|
2025-01-10 10:03:53 -06:00
|
|
|
|
2024-11-12 22:47:25 -06:00
|
|
|
// If not OpenAI then use binary part for image, otherwise, use the ImageURL part with encoding from https://platform.openai.com/docs/guides/vision
|
|
|
|
var parts []llms.ContentPart
|
|
|
|
if strings.ToLower(visionLlmProvider) != "openai" {
|
2025-01-10 10:03:53 -06:00
|
|
|
// Log image size in kilobytes
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Debugf("Image size: %d KB", len(jpegBytes)/1024)
|
2024-11-12 22:47:25 -06:00
|
|
|
parts = []llms.ContentPart{
|
|
|
|
llms.BinaryPart("image/jpeg", jpegBytes),
|
|
|
|
llms.TextPart(prompt),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
base64Image := base64.StdEncoding.EncodeToString(jpegBytes)
|
2025-01-10 10:03:53 -06:00
|
|
|
// Log image size in kilobytes
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Debugf("Image size: %d KB", len(base64Image)/1024)
|
2024-11-12 22:47:25 -06:00
|
|
|
parts = []llms.ContentPart{
|
|
|
|
llms.ImageURLPart(fmt.Sprintf("data:image/jpeg;base64,%s", base64Image)),
|
|
|
|
llms.TextPart(prompt),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
// Convert the image to text
|
|
|
|
completion, err := app.VisionLLM.GenerateContent(ctx, []llms.MessageContent{
|
|
|
|
{
|
2024-11-12 22:47:25 -06:00
|
|
|
Parts: parts,
|
|
|
|
Role: llms.ChatMessageTypeHuman,
|
2024-10-28 11:34:41 -05:00
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error getting response from LLM: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
result := completion.Choices[0].Content
|
|
|
|
fmt.Println(result)
|
|
|
|
return result, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// getSuggestedTitle generates a suggested title for a document using the LLM
|
2025-01-27 02:33:12 -06:00
|
|
|
func (app *App) getSuggestedTitle(ctx context.Context, content string, originalTitle string, logger *logrus.Entry) (string, error) {
|
2024-10-28 11:34:41 -05:00
|
|
|
likelyLanguage := getLikelyLanguage()
|
|
|
|
|
|
|
|
templateMutex.RLock()
|
|
|
|
defer templateMutex.RUnlock()
|
|
|
|
|
2025-02-02 09:14:18 -06:00
|
|
|
// Get available tokens for content
|
|
|
|
templateData := map[string]interface{}{
|
2024-10-28 11:34:41 -05:00
|
|
|
"Language": likelyLanguage,
|
|
|
|
"Content": content,
|
2025-01-27 02:33:12 -06:00
|
|
|
"Title": originalTitle,
|
2025-02-02 09:14:18 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
availableTokens, err := getAvailableTokensForContent(titleTemplate, templateData)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("Error calculating available tokens: %v", err)
|
|
|
|
return "", fmt.Errorf("error calculating available tokens: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Truncate content if needed
|
|
|
|
truncatedContent, err := truncateContentByTokens(content, availableTokens)
|
|
|
|
if err != nil {
|
|
|
|
logger.Errorf("Error truncating content: %v", err)
|
|
|
|
return "", fmt.Errorf("error truncating content: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Execute template with truncated content
|
|
|
|
var promptBuffer bytes.Buffer
|
|
|
|
templateData["Content"] = truncatedContent
|
|
|
|
err = titleTemplate.Execute(&promptBuffer, templateData)
|
2025-02-05 13:59:08 -06:00
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error executing title template: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
prompt := promptBuffer.String()
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Debugf("Title suggestion prompt: %s", prompt)
|
2024-10-28 11:34:41 -05:00
|
|
|
|
|
|
|
completion, err := app.LLM.GenerateContent(ctx, []llms.MessageContent{
|
|
|
|
{
|
|
|
|
Parts: []llms.ContentPart{
|
|
|
|
llms.TextContent{
|
|
|
|
Text: prompt,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Role: llms.ChatMessageTypeHuman,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return "", fmt.Errorf("error getting response from LLM: %v", err)
|
|
|
|
}
|
2025-02-05 13:59:08 -06:00
|
|
|
result := stripReasoning(completion.Choices[0].Content)
|
|
|
|
return strings.TrimSpace(strings.Trim(result, "\"")), nil
|
2024-10-28 11:34:41 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
// generateDocumentSuggestions generates suggestions for a set of documents
|
2025-01-13 01:31:51 -06:00
|
|
|
func (app *App) generateDocumentSuggestions(ctx context.Context, suggestionRequest GenerateSuggestionsRequest, logger *logrus.Entry) ([]DocumentSuggestion, error) {
|
2024-10-28 11:34:41 -05:00
|
|
|
// Fetch all available tags from paperless-ngx
|
|
|
|
availableTagsMap, err := app.Client.GetAllTags(ctx)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("failed to fetch available tags: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Prepare a list of tag names
|
|
|
|
availableTagNames := make([]string, 0, len(availableTagsMap))
|
|
|
|
for tagName := range availableTagsMap {
|
|
|
|
if tagName == manualTag {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
availableTagNames = append(availableTagNames, tagName)
|
|
|
|
}
|
|
|
|
|
2025-01-13 08:59:29 -06:00
|
|
|
// Prepare a list of document correspodents
|
|
|
|
availableCorrespondentsMap, err := app.Client.GetAllCorrespondents(ctx)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("failed to fetch available correspondents: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Prepare a list of correspondent names
|
|
|
|
availableCorrespondentNames := make([]string, 0, len(availableCorrespondentsMap))
|
|
|
|
for correspondentName := range availableCorrespondentsMap {
|
|
|
|
availableCorrespondentNames = append(availableCorrespondentNames, correspondentName)
|
|
|
|
}
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
documents := suggestionRequest.Documents
|
|
|
|
documentSuggestions := []DocumentSuggestion{}
|
|
|
|
|
|
|
|
var wg sync.WaitGroup
|
|
|
|
var mu sync.Mutex
|
|
|
|
errorsList := make([]error, 0)
|
|
|
|
|
|
|
|
for i := range documents {
|
|
|
|
wg.Add(1)
|
|
|
|
go func(doc Document) {
|
|
|
|
defer wg.Done()
|
|
|
|
documentID := doc.ID
|
2025-01-13 01:31:51 -06:00
|
|
|
docLogger := documentLogger(documentID)
|
|
|
|
docLogger.Printf("Processing Document ID %d...", documentID)
|
2024-10-28 11:34:41 -05:00
|
|
|
|
|
|
|
content := doc.Content
|
2025-01-27 02:10:53 -06:00
|
|
|
suggestedTitle := doc.Title
|
2024-10-28 11:34:41 -05:00
|
|
|
var suggestedTags []string
|
2025-01-13 08:59:29 -06:00
|
|
|
var suggestedCorrespondent string
|
2024-10-28 11:34:41 -05:00
|
|
|
|
|
|
|
if suggestionRequest.GenerateTitles {
|
2025-01-27 02:10:53 -06:00
|
|
|
suggestedTitle, err = app.getSuggestedTitle(ctx, content, suggestedTitle, docLogger)
|
2024-10-28 11:34:41 -05:00
|
|
|
if err != nil {
|
|
|
|
mu.Lock()
|
|
|
|
errorsList = append(errorsList, fmt.Errorf("Document %d: %v", documentID, err))
|
|
|
|
mu.Unlock()
|
2025-01-13 01:31:51 -06:00
|
|
|
docLogger.Errorf("Error processing document %d: %v", documentID, err)
|
2024-10-28 11:34:41 -05:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if suggestionRequest.GenerateTags {
|
2025-01-13 08:59:29 -06:00
|
|
|
suggestedTags, err = app.getSuggestedTags(ctx, content, suggestedTitle, availableTagNames, doc.Tags, docLogger)
|
2024-10-28 11:34:41 -05:00
|
|
|
if err != nil {
|
|
|
|
mu.Lock()
|
|
|
|
errorsList = append(errorsList, fmt.Errorf("Document %d: %v", documentID, err))
|
|
|
|
mu.Unlock()
|
2025-01-13 01:31:51 -06:00
|
|
|
logger.Errorf("Error generating tags for document %d: %v", documentID, err)
|
2024-10-28 11:34:41 -05:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2025-01-13 08:59:29 -06:00
|
|
|
if suggestionRequest.GenerateCorrespondents {
|
|
|
|
suggestedCorrespondent, err = app.getSuggestedCorrespondent(ctx, content, suggestedTitle, availableCorrespondentNames, correspondentBlackList)
|
|
|
|
if err != nil {
|
|
|
|
mu.Lock()
|
|
|
|
errorsList = append(errorsList, fmt.Errorf("Document %d: %v", documentID, err))
|
|
|
|
mu.Unlock()
|
|
|
|
log.Errorf("Error generating correspondents for document %d: %v", documentID, err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
mu.Lock()
|
|
|
|
suggestion := DocumentSuggestion{
|
|
|
|
ID: documentID,
|
|
|
|
OriginalDocument: doc,
|
|
|
|
}
|
|
|
|
// Titles
|
|
|
|
if suggestionRequest.GenerateTitles {
|
2025-01-13 01:31:51 -06:00
|
|
|
docLogger.Printf("Suggested title for document %d: %s", documentID, suggestedTitle)
|
2024-10-28 11:34:41 -05:00
|
|
|
suggestion.SuggestedTitle = suggestedTitle
|
|
|
|
} else {
|
|
|
|
suggestion.SuggestedTitle = doc.Title
|
|
|
|
}
|
|
|
|
|
|
|
|
// Tags
|
|
|
|
if suggestionRequest.GenerateTags {
|
2025-01-13 01:31:51 -06:00
|
|
|
docLogger.Printf("Suggested tags for document %d: %v", documentID, suggestedTags)
|
2024-10-28 11:34:41 -05:00
|
|
|
suggestion.SuggestedTags = suggestedTags
|
|
|
|
} else {
|
2025-01-13 03:52:56 -06:00
|
|
|
suggestion.SuggestedTags = doc.Tags
|
2024-10-28 11:34:41 -05:00
|
|
|
}
|
2025-01-13 03:52:56 -06:00
|
|
|
|
2025-01-13 08:59:29 -06:00
|
|
|
// Correspondents
|
|
|
|
if suggestionRequest.GenerateCorrespondents {
|
|
|
|
log.Printf("Suggested correspondent for document %d: %s", documentID, suggestedCorrespondent)
|
|
|
|
suggestion.SuggestedCorrespondent = suggestedCorrespondent
|
|
|
|
} else {
|
|
|
|
suggestion.SuggestedCorrespondent = ""
|
|
|
|
}
|
2025-01-13 03:52:56 -06:00
|
|
|
// Remove manual tag from the list of suggested tags
|
|
|
|
suggestion.RemoveTags = []string{manualTag, autoTag}
|
|
|
|
|
2024-10-28 11:34:41 -05:00
|
|
|
documentSuggestions = append(documentSuggestions, suggestion)
|
|
|
|
mu.Unlock()
|
2025-01-13 01:31:51 -06:00
|
|
|
docLogger.Printf("Document %d processed successfully.", documentID)
|
2024-10-28 11:34:41 -05:00
|
|
|
}(documents[i])
|
|
|
|
}
|
|
|
|
|
|
|
|
wg.Wait()
|
|
|
|
|
|
|
|
if len(errorsList) > 0 {
|
|
|
|
return nil, errorsList[0] // Return the first error encountered
|
|
|
|
}
|
|
|
|
|
|
|
|
return documentSuggestions, nil
|
|
|
|
}
|
2025-02-05 13:59:08 -06:00
|
|
|
|
|
|
|
// stripReasoning removes reasoning sections delimited by <think> and </think>
// tags from the content and trims surrounding whitespace.
//
// Fixes over the original: every well-formed <think>...</think> section is
// removed (the original stripped only the first), and the closing tag is
// searched for only AFTER the opening tag (the original used the absolute
// index of the first </think>, which mishandled a closer preceding the opener).
// An unterminated <think> is left untouched, as before.
func stripReasoning(content string) string {
	for {
		start := strings.Index(content, "<think>")
		if start == -1 {
			break
		}
		// Look for the closing tag only after the opening tag.
		rel := strings.Index(content[start:], "</think>")
		if rel == -1 {
			// Unterminated reasoning block: leave content as-is.
			break
		}
		content = content[:start] + content[start+rel+len("</think>"):]
	}
	return strings.TrimSpace(content)
}
|