From 495db3caa6101514c576d9bac18206cd88011871 Mon Sep 17 00:00:00 2001
From: "Ben Sima (aider)"
Date: Wed, 4 Jun 2025 11:50:05 -0400
Subject: Add Open Web UI AI Chat Container and Nginx Proxy

Introduce a new container definition for the Open Web UI AI Chat service in
`OpenWebui.nix`, specifying its Docker image, volume, and environment
variables. This change includes the addition of a new port in `Ports.nix` to
facilitate communication with the service. Furthermore, configure Nginx to
serve the AI Chat application by adding a new virtual host entry in `Web.nix`,
ensuring SSL is enforced and websocket support is enabled. This setup allows
for a seamless integration of the AI Chat service into the existing
infrastructure, improving accessibility and security.
---
 Omni/Cloud/OpenWebui.nix | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 Omni/Cloud/OpenWebui.nix

(limited to 'Omni/Cloud/OpenWebui.nix')

diff --git a/Omni/Cloud/OpenWebui.nix b/Omni/Cloud/OpenWebui.nix
new file mode 100644
index 0000000..fe71608
--- /dev/null
+++ b/Omni/Cloud/OpenWebui.nix
@@ -0,0 +1,14 @@
+{config, ...}: let
+  ports = import ./Ports.nix;
+in {
+  config.virtualisation.oci-containers.backend = "docker";
+
+  config.virtualisation.oci-containers.containers.open-webui-aichat = {
+    image = "ghcr.io/open-webui/open-webui:main";
+    volumes = ["/var/lib/open-webui-aichat:/app/backend/data"];
+    environment = {
+      PORT = toString ports.open-webui-aichat;
+    };
+    extraOptions = ["--network=host"];
+  };
+}
--
cgit v1.2.3
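
Editor's note: the cgit view above is limited to `Omni/Cloud/OpenWebui.nix`, so the
`Ports.nix` and `Web.nix` changes described in the commit message are not shown.
Below is a minimal sketch of what that companion wiring could look like on NixOS;
the port value, the `chat.example.com` hostname, and the use of `enableACME` are
assumptions for illustration, not taken from this patch.

    # Hypothetical sketch only: port number, hostname, and ACME usage are
    # assumptions; they are not part of this patch.

    # Omni/Cloud/Ports.nix (excerpt): export a named port for the container.
    {
      # ... existing ports ...
      open-webui-aichat = 8013; # assumed value; any free local port works
    }

    # Omni/Cloud/Web.nix (excerpt): nginx virtual host with SSL enforced and
    # websocket proxying enabled, matching the commit message's description.
    {config, ...}: let
      ports = import ./Ports.nix;
    in {
      config.services.nginx.virtualHosts."chat.example.com" = {
        forceSSL = true;   # "ensuring SSL is enforced"
        enableACME = true; # assumed; certificates could come from elsewhere
        locations."/" = {
          proxyPass = "http://127.0.0.1:${toString ports.open-webui-aichat}";
          proxyWebsockets = true; # "websocket support is enabled"
        };
      };
    }

Because the container runs with `--network=host` and reads `PORT` from
`Ports.nix`, it listens directly on the host, so nginx can simply proxy to
`127.0.0.1` on that same port.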