From bede39f6efc582579cd638787fc85920129c2eb2 Mon Sep 17 00:00:00 2001 From: Christoph Ruckstetter Date: Fri, 3 Jan 2025 16:51:36 +0000 Subject: [PATCH] make listen interface configurable (#57) * make listen interface configurable * describe new listen interface setting in readme --- README.md | 2 ++ main.go | 8 ++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 04513bf..79930c2 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,7 @@ services: VISION_LLM_PROVIDER: 'ollama' # Optional (for OCR) - ollama or openai VISION_LLM_MODEL: 'minicpm-v' # Optional (for OCR) - minicpm-v, for example for ollama, gpt-4o for openai LOG_LEVEL: 'info' # Optional or 'debug', 'warn', 'error' + LISTEN_INTERFACE: '127.0.0.1:8080' # Optional, default is ':8080' volumes: - ./prompts:/app/prompts # Mount the prompts directory ports: @@ -147,6 +148,7 @@ If you prefer to run the application manually: | `VISION_LLM_PROVIDER` | The vision LLM provider to use for OCR (`openai` or `ollama`). | No | | `VISION_LLM_MODEL` | The model name to use for OCR (e.g., `minicpm-v`). | No | | `LOG_LEVEL` | The log level for the application (`info`, `debug`, `warn`, `error`). Default is `info`. | No | +| `LISTEN_INTERFACE` | The interface paperless-gpt listens on. Default is `:8080`. | No | **Note:** When using Ollama, ensure that the Ollama server is running and accessible from the paperless-gpt container. 
diff --git a/main.go b/main.go index 4448832..57f807f 100644 --- a/main.go +++ b/main.go @@ -37,6 +37,7 @@ var ( visionLlmProvider = os.Getenv("VISION_LLM_PROVIDER") visionLlmModel = os.Getenv("VISION_LLM_MODEL") logLevel = strings.ToLower(os.Getenv("LOG_LEVEL")) + listenInterface = os.Getenv("LISTEN_INTERFACE") // Templates titleTemplate *template.Template @@ -200,8 +201,11 @@ func main() { numWorkers := 1 // Number of workers to start startWorkerPool(app, numWorkers) - log.Infoln("Server started on port :8080") - if err := router.Run(":8080"); err != nil { + if listenInterface == "" { + listenInterface = ":8080" + } + log.Infoln("Server started on interface", listenInterface) + if err := router.Run(listenInterface); err != nil { log.Fatalf("Failed to run server: %v", err) } }