diff --git a/README.md b/README.md index 79930c2..d0dbb6b 100644 --- a/README.md +++ b/README.md @@ -149,6 +149,8 @@ If you prefer to run the application manually: | `VISION_LLM_MODEL` | The model name to use for OCR (e.g., `minicpm-v`). | No | | `LOG_LEVEL` | The log level for the application (`info`, `debug`, `warn`, `error`). Default is `info`. | No | | `LISTEN_INTERFACE` | The interface paperless-gpt listens to. Default is `:8080` | No | +| `AUTO_GENERATE_TITLE` | Set to `false` (case-insensitive) to disable title generation when suggestions are applied automatically via `paperless-gpt-auto`; any other value, or leaving it unset, enables it. Default is `true`. | No | +| `AUTO_GENERATE_TAGS` | Set to `false` (case-insensitive) to disable tag generation when suggestions are applied automatically via `paperless-gpt-auto`; any other value, or leaving it unset, enables it. Default is `true`. | No | **Note:** When using Ollama, ensure that the Ollama server is running and accessible from the paperless-gpt container. diff --git a/main.go b/main.go index 57f807f..56662be 100644 --- a/main.go +++ b/main.go @@ -38,6 +38,8 @@ var ( visionLlmModel = os.Getenv("VISION_LLM_MODEL") logLevel = strings.ToLower(os.Getenv("LOG_LEVEL")) listenInterface = os.Getenv("LISTEN_INTERFACE") + autoGenerateTitle = os.Getenv("AUTO_GENERATE_TITLE") + autoGenerateTags = os.Getenv("AUTO_GENERATE_TAGS") // Templates titleTemplate *template.Template @@ -283,8 +285,8 @@ func (app *App) processAutoTagDocuments() (int, error) { suggestionRequest := GenerateSuggestionsRequest{ Documents: documents, - GenerateTitles: true, - GenerateTags: true, + GenerateTitles: strings.ToLower(autoGenerateTitle) != "false", + GenerateTags: strings.ToLower(autoGenerateTags) != "false", } suggestions, err := app.generateDocumentSuggestions(ctx, suggestionRequest)