nixos/nextjs-ollama-llm-ui: init module
NixOS already has good support for the Ollama backend service. Now we can also benefit from having a convenient web frontend for it.
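For reference, a minimal configuration sketch of how the new module is intended to sit next to the existing Ollama backend service (the port and URL values below are only illustrative, not mandated by this commit):

```nix
{
  # Ollama backend, already supported by NixOS.
  services.ollama.enable = true;

  # Web frontend added by this commit.
  services.nextjs-ollama-llm-ui = {
    enable = true;
    # Defaults shown explicitly for clarity; adjust as needed.
    hostname = "127.0.0.1";
    port = 3000;
    # Where the browser can reach the Ollama API; this assumes the backend
    # listens on its usual local address.
    ollamaUrl = "http://127.0.0.1:11434";
  };
}
```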
commit 8a05b4f8d4
parent 1df1f8d3be
Changed paths: nixos/doc/manual/release-notes, nixos/modules, nixos/tests, pkgs/by-name/ne/nextjs-ollama-llm-ui
nixos/doc/manual/release-notes:
@@ -137,6 +137,8 @@ The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been m
 
 - [ollama](https://ollama.ai), server for running large language models locally.
 
+- [nextjs-ollama-llm-ui](https://github.com/jakobhoeg/nextjs-ollama-llm-ui), light-weight frontend server to chat with Ollama models through a web app.
+
 - [ownCloud Infinite Scale Stack](https://owncloud.com/infinite-scale-4-0/), a modern and scalable rewrite of ownCloud.
 
 - [PhotonVision](https://photonvision.org/), a free, fast, and easy-to-use computer vision solution for the FIRST® Robotics Competition.
nixos/modules/module-list.nix:
@@ -1399,6 +1399,7 @@
   ./services/web-apps/netbox.nix
   ./services/web-apps/nextcloud.nix
   ./services/web-apps/nextcloud-notify_push.nix
+  ./services/web-apps/nextjs-ollama-llm-ui.nix
   ./services/web-apps/nexus.nix
   ./services/web-apps/nifi.nix
   ./services/web-apps/node-red.nix
nixos/modules/services/web-apps/nextjs-ollama-llm-ui.nix (new file, 87 lines):

{
  config,
  pkgs,
  lib,
  ...
}:
let
  cfg = config.services.nextjs-ollama-llm-ui;
  # We have to override the URL to the Ollama service here, because it gets baked into the web app.
  nextjs-ollama-llm-ui = cfg.package.override { ollamaUrl = cfg.ollamaUrl; };
in
{
  options = {
    services.nextjs-ollama-llm-ui = {
      enable = lib.mkEnableOption ''
        Simple Ollama web UI service; an easy-to-use web frontend for an Ollama backend service.
        Run state-of-the-art AI large language models (LLMs) similar to ChatGPT locally with privacy
        on your personal computer.
        This service is stateless and doesn't store any data on the server; all data is kept
        locally in your web browser.
        See https://github.com/jakobhoeg/nextjs-ollama-llm-ui.

        Required: You need the Ollama backend service running by having
        "services.nextjs-ollama-llm-ui.ollamaUrl" point to the correct URL.
        You can host such a backend service with NixOS through "services.ollama".
      '';
      package = lib.mkPackageOption pkgs "nextjs-ollama-llm-ui" { };

      hostname = lib.mkOption {
        type = lib.types.str;
        default = "127.0.0.1";
        example = "ui.example.org";
        description = ''
          The hostname under which the Ollama UI interface should be accessible.
          By default it uses localhost/127.0.0.1 to be accessible only from the local machine.
          Change to "0.0.0.0" to make it directly accessible from the local network.

          Note: You should keep it at 127.0.0.1 and only serve to the local
          network or the internet from a (home) server behind a reverse proxy with TLS encryption.
          See https://wiki.nixos.org/wiki/Nginx for instructions on how to set up a reverse proxy.
        '';
      };

      port = lib.mkOption {
        type = lib.types.port;
        default = 3000;
        example = 3000;
        description = ''
          The port under which the Ollama UI interface should be accessible.
        '';
      };

      ollamaUrl = lib.mkOption {
        type = lib.types.str;
        default = "127.0.0.1:11434";
        example = "https://ollama.example.org";
        description = ''
          The address (including host and port) under which we can access the Ollama backend server.
          Note that if the UI service is running under a domain "https://ui.example.org",
          the Ollama backend service must allow "CORS" requests from this domain, e.g. by adding
          "services.ollama.environment.OLLAMA_ORIGINS = [ ... "https://ui.example.org" ];".
        '';
      };
    };
  };

  config = lib.mkIf cfg.enable {
    systemd.services = {

      nextjs-ollama-llm-ui = {
        wantedBy = [ "multi-user.target" ];
        description = "Nextjs Ollama LLM UI";
        after = [ "network.target" ];
        environment = {
          HOSTNAME = cfg.hostname;
          PORT = toString cfg.port;
          NEXT_PUBLIC_OLLAMA_URL = cfg.ollamaUrl;
        };
        serviceConfig = {
          ExecStart = "${lib.getExe nextjs-ollama-llm-ui}";
          DynamicUser = true;
        };
      };
    };
  };
  meta.maintainers = with lib.maintainers; [ malteneuss ];
}
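The hostname and ollamaUrl descriptions above point at a reverse-proxy and CORS setup without spelling it out. Here is a hedged sketch of one way to wire that up with nginx, reusing the example hostnames from the option docs (ui.example.org, ollama.example.org, and the ACME email are placeholders, and the OLLAMA_ORIGINS value assumes services.ollama.environment accepts plain strings):

```nix
{
  # The UI keeps listening on localhost; nginx terminates TLS in front of it.
  services.nextjs-ollama-llm-ui = {
    enable = true;
    # Baked into the frontend at build time via the package override above.
    ollamaUrl = "https://ollama.example.org";
  };

  services.ollama = {
    enable = true;
    # Allow browser (CORS) requests from the UI's public domain.
    environment.OLLAMA_ORIGINS = "https://ui.example.org";
  };

  security.acme = {
    acceptTerms = true;
    defaults.email = "admin@example.org"; # placeholder
  };

  services.nginx = {
    enable = true;
    virtualHosts."ui.example.org" = {
      enableACME = true;
      forceSSL = true;
      locations."/".proxyPass = "http://127.0.0.1:3000";
    };
    virtualHosts."ollama.example.org" = {
      enableACME = true;
      forceSSL = true;
      locations."/".proxyPass = "http://127.0.0.1:11434";
    };
  };
}
```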
nixos/tests/all-tests.nix:
@@ -616,6 +616,7 @@ in {
   # TODO: put in networking.nix after the test becomes more complete
   networkingProxy = handleTest ./networking-proxy.nix {};
   nextcloud = handleTest ./nextcloud {};
+  nextjs-ollama-llm-ui = runTest ./web-apps/nextjs-ollama-llm-ui.nix;
   nexus = handleTest ./nexus.nix {};
   # TODO: Test nfsv3 + Kerberos
   nfs3 = handleTest ./nfs { version = 3; };
nixos/tests/web-apps/nextjs-ollama-llm-ui.nix (new file, 22 lines):

{ lib, ... }:

{
  name = "nextjs-ollama-llm-ui";
  meta.maintainers = with lib.maintainers; [ malteneuss ];

  nodes.machine =
    { pkgs, ... }:
    {
      services.nextjs-ollama-llm-ui = {
        enable = true;
        port = 8080;
      };
    };

  testScript = ''
    # Ensure the service is started and reachable
    machine.wait_for_unit("nextjs-ollama-llm-ui.service")
    machine.wait_for_open_port(8080)
    machine.succeed("curl --fail http://127.0.0.1:8080")
  '';
}
pkgs/by-name/ne/nextjs-ollama-llm-ui/package.nix:
@@ -2,6 +2,7 @@
   buildNpmPackage,
   fetchFromGitHub,
   inter,
+  nixosTests,
   lib,
   # This is an app that can only be used in a browser and starts a web server only accessible at
   # localhost/127.0.0.1 from the local computer at the given port.

@@ -82,6 +83,12 @@ buildNpmPackage {
   doDist = false;
   #######################
 
+  passthru = {
+    tests = {
+      inherit (nixosTests) nextjs-ollama-llm-ui;
+    };
+  };
+
   meta = {
     description = "Simple chat web interface for Ollama LLMs";
     changelog = "https://github.com/jakobhoeg/nextjs-ollama-llm-ui/releases/tag/v${version}";
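Because of the passthru.tests wiring above, the VM test is also reachable directly from the package attribute. A small sketch of how that can be evaluated (the <nixpkgs> import path is just an example):

```nix
# Builds the NixOS VM test that the package links to via passthru.tests.
let
  pkgs = import <nixpkgs> { };
in
pkgs.nextjs-ollama-llm-ui.tests.nextjs-ollama-llm-ui
```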