Skip to content

Commit

Permalink
fix(ollama): Use podman for open-webui
Browse files Browse the repository at this point in the history
  • Loading branch information
mrjones2014 committed Feb 20, 2024
1 parent 12d07f0 commit 6487559
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 54 deletions.
2 changes: 1 addition & 1 deletion hosts/pc/default.nix
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
hashedPassword =
"$y$j9T$L.RrmE3CRSB.lQayiw2ZN/$vA4XkSR13yL016t3HaZ11uCN/sCmXqBcuUcSBxMjiPD";
home = "/home/mat";
extraGroups = [ "networkmanager" "wheel" ];
extraGroups = [ "networkmanager" "wheel" "oci" ];
};
powerManagement.cpuFreqGovernor = "performance";
hardware = {
Expand Down
67 changes: 14 additions & 53 deletions nixos-modules/open-webui.nix
Original file line number Diff line number Diff line change
Expand Up @@ -2,60 +2,21 @@

# Download LLMs per api
# curl http://localhost:11434/api/pull -d '{ "name": "llama2" }'
{ pkgs, ... }:

let
open-webui-static = pkgs.buildNpmPackage {
pname = "open-webui";
version = "0.0.1";

src = pkgs.fetchFromGitHub {
owner = "open-webui";
repo = "open-webui";
rev = "76a788939f92a7a7d9705f971b9ce6e27b249d31";
sha256 = "sha256-MWgERNvg3FX1N6GD11Zl27Ni/tuEoRyYNWPiLiHst2M=";
{ config, ... }: {
virtualisation = {
podman = {
enable = true;
dockerCompat = true;
defaultNetwork.settings.dns_enabled = true;
};
npmDepsHash = "sha256-TavFWEROSXS3GKbMzKhblLYLuN1tpXzlJG0Tm5p6fMI=";

PUBLIC_API_BASE_URL = "http://localhost:11434/api";

# Ollama URL for the backend to connect
# The path '/ollama/api' will be redirected to the specified backend URL
OLLAMA_API_BASE_URL = "http://localhost:11434/api";
# npm run build creates a static "build" folder.
installPhase = ''
cp -R ./build $out
'';
};
open-webui = pkgs.writeShellScriptBin "open-webui" ''
# cors: allow browser to make requests to ollama on different port than website
${pkgs.nodePackages.http-server}/bin/http-server ${open-webui-static} --cors='*' --port 8080
'';
in {
# create a Linux user that will run ollama
# and has access rights to store LLM files.
users.users.ollama = {
name = "ollama";
group = "ollama";
description = "Ollama user";
isSystemUser = true;
};
# suggested by nix build, no idea why
users.groups.ollama = { };
systemd.services.open-webui = {
description = "Ollama WebUI Service";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
enable = true;

serviceConfig = {
ExecStart = "${open-webui}/bin/open-webui";
# DynamicUser = "true";
User = "ollama";
Type = "simple";
Restart = "always";
# RestartSec = 3;
# KillMode = "process";
};
virtualisation.oci-containers.backend = "podman";
virtualisation.oci-containers.containers.open-webui = {
autoStart = true;
image = "ghcr.io/open-webui/open-webui";
ports = [ "3000:8080" ];
# TODO figure out how to create the data directory declaratively
volumes = [ "${config.users.users.mat.home}/open-webui:/app/backend/data" ];
extraOptions = [ "--add-host=host.docker.internal:host-gateway" ];
};
}

0 comments on commit 6487559

Please sign in to comment.