private-gpt: init at 0.5.0 #305586

Merged · 2 commits · May 12, 2024
2 changes: 2 additions & 0 deletions nixos/doc/manual/release-notes/rl-2405.section.md
@@ -209,6 +209,8 @@ The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been m

- [isolate](https://github.com/ioi/isolate), a sandbox for securely executing untrusted programs. Available as [security.isolate](#opt-security.isolate.enable).

- [private-gpt](https://github.com/zylon-ai/private-gpt), a service to interact with your documents using the power of LLMs, 100% privately, no data leaks. Available as [services.private-gpt](#opt-services.private-gpt.enable).

## Backward Incompatibilities {#sec-release-24.05-incompatibilities}

<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
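For orientation, enabling the new option in a host configuration could look roughly like the minimal sketch below; only the `services.private-gpt.enable` path comes from this PR, the surrounding skeleton is illustrative.

```nix
# Minimal sketch: enable the service with the module defaults introduced in this PR.
{ config, pkgs, ... }:
{
  services.private-gpt.enable = true;
}
```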
1 change: 1 addition & 0 deletions nixos/modules/module-list.nix
@@ -776,6 +776,7 @@
./services/misc/polaris.nix
./services/misc/portunus.nix
./services/misc/preload.nix
./services/misc/private-gpt.nix
./services/misc/prowlarr.nix
./services/misc/pufferpanel.nix
./services/misc/pykms.nix
121 changes: 121 additions & 0 deletions nixos/modules/services/misc/private-gpt.nix
@@ -0,0 +1,121 @@
{ config
, lib
, pkgs
, ...
}:
let
inherit (lib) types;

format = pkgs.formats.yaml { };
cfg = config.services.private-gpt;
in
{
options = {
services.private-gpt = {
enable = lib.mkEnableOption "private-gpt for local large language models";
package = lib.mkPackageOption pkgs "private-gpt" { };

stateDir = lib.mkOption {
type = types.path;
default = "/var/lib/private-gpt";
description = "State directory of private-gpt.";
};

settings = lib.mkOption {
type = format.type;
default = {
llm = {
mode = "ollama";
tokenizer = "";
};
embedding = {
mode = "ollama";
};
ollama = {
llm_model = "llama3";
embedding_model = "nomic-embed-text";
api_base = "http://localhost:11434";
embedding_api_base = "http://localhost:11434";
keep_alive = "5m";
tfs_z = 1;
top_k = 40;
top_p = 0.9;
repeat_last_n = 64;
repeat_penalty = 1.2;
request_timeout = 120;
};
vectorstore = {
database = "qdrant";
};
qdrant = {
path = "/var/lib/private-gpt/vectorstore/qdrant";
};
data = {
local_data_folder = "/var/lib/private-gpt";
};
openai = { };
azopenai = { };
};
description = ''
settings-local.yaml for private-gpt
'';
};
};
};

config = lib.mkIf cfg.enable {
systemd.services.private-gpt = {
description = "Interact with your documents using the power of GPT, 100% privately, no data leaks";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];

preStart =
let
config = format.generate "settings-local.yaml" (cfg.settings // { server.env_name = "local"; });
in
''
mkdir -p ${cfg.stateDir}/{settings,huggingface,matplotlib,tiktoken_cache}
cp ${cfg.package.cl100k_base.tiktoken} ${cfg.stateDir}/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4
cp ${pkgs.python3Packages.private-gpt}/${pkgs.python3.sitePackages}/private_gpt/settings.yaml ${cfg.stateDir}/settings/settings.yaml
cp "${config}" "${cfg.stateDir}/settings/settings-local.yaml"
chmod 600 "${cfg.stateDir}/settings/settings-local.yaml"
'';

environment = {
PGPT_PROFILES = "local";
PGPT_SETTINGS_FOLDER = "${cfg.stateDir}/settings";
HF_HOME = "${cfg.stateDir}/huggingface";
TRANSFORMERS_OFFLINE = "1";
HF_DATASETS_OFFLINE = "1";
MPLCONFIGDIR = "${cfg.stateDir}/matplotlib";
};

serviceConfig = {
ExecStart = lib.getExe cfg.package;
WorkingDirectory = cfg.stateDir;
StateDirectory = "private-gpt";
RuntimeDirectory = "private-gpt";
RuntimeDirectoryMode = "0755";
PrivateTmp = true;
DynamicUser = true;
DevicePolicy = "closed";
LockPersonality = true;
MemoryDenyWriteExecute = true;
PrivateUsers = true;
ProtectHome = true;
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectControlGroups = true;
ProcSubset = "pid";
RestrictNamespaces = true;
RestrictRealtime = true;
SystemCallArchitectures = "native";
UMask = "0077";
};
};
};

meta.maintainers = with lib.maintainers; [ drupol ];
}
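A usage sketch for the module above: because the settings shown here are the option's `default`, a user-supplied `services.private-gpt.settings` replaces that default rather than merging with it, so a custom configuration typically restates the full attribute set. The model names and API endpoints below are illustrative assumptions, not values mandated by the module.

```nix
# Sketch: overriding services.private-gpt.settings. Defining `settings` discards
# the module default (option defaults are not merged with definitions), so the
# relevant keys are restated here; model names and api_base are assumptions.
{
  services.private-gpt = {
    enable = true;
    settings = {
      llm = { mode = "ollama"; tokenizer = ""; };
      embedding.mode = "ollama";
      ollama = {
        llm_model = "llama3";
        embedding_model = "nomic-embed-text";
        api_base = "http://127.0.0.1:11434";
        embedding_api_base = "http://127.0.0.1:11434";
        keep_alive = "5m";
      };
      vectorstore.database = "qdrant";
      qdrant.path = "/var/lib/private-gpt/vectorstore/qdrant";
      data.local_data_folder = "/var/lib/private-gpt";
      openai = { };
      azopenai = { };
    };
  };
}
```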
1 change: 1 addition & 0 deletions nixos/tests/all-tests.nix
@@ -757,6 +757,7 @@ in {
pretix = runTest ./web-apps/pretix.nix;
printing-socket = handleTest ./printing.nix { socket = true; };
printing-service = handleTest ./printing.nix { socket = false; };
private-gpt = handleTest ./private-gpt.nix {};
privoxy = handleTest ./privoxy.nix {};
prometheus = handleTest ./prometheus.nix {};
prometheus-exporters = handleTest ./prometheus-exporters.nix {};
27 changes: 27 additions & 0 deletions nixos/tests/private-gpt.nix
@@ -0,0 +1,27 @@
import ./make-test-python.nix ({ pkgs, lib, ... }:
let
mainPort = "8001";
in
{
name = "private-gpt";
meta = with lib.maintainers; {
maintainers = [ drupol ];
};

nodes = {
machine = { ... }: {
services.private-gpt = {
enable = true;
};
};
};

testScript = ''
machine.start()

machine.wait_for_unit("private-gpt.service")
machine.wait_for_open_port(${mainPort})

machine.succeed("curl http://127.0.0.1:${mainPort}")
'';
})
17 changes: 17 additions & 0 deletions pkgs/by-name/pr/private-gpt/package.nix
@@ -0,0 +1,17 @@
{ python3Packages
, makeBinaryWrapper
}:

python3Packages.toPythonApplication (python3Packages.private-gpt.overrideAttrs (oldAttrs: {
nativeBuildInputs = oldAttrs.nativeBuildInputs ++ [ makeBinaryWrapper ];

passthru.cl100k_base = {
inherit (python3Packages.private-gpt.cl100k_base) tiktoken;
};

postInstall = ''
makeWrapper ${python3Packages.python.interpreter} $out/bin/private-gpt \
--prefix PYTHONPATH : "$PYTHONPATH" \
--add-flags "-m private_gpt"
'';
}))
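Usage note (a sketch, not part of this PR): the wrapper produces a regular `private-gpt` executable, so the package can also be installed directly, while the NixOS module consumes it through `services.private-gpt.package`.

```nix
# Sketch: installing the wrapped application into the system profile.
{ pkgs, ... }:
{
  environment.systemPackages = [ pkgs.private-gpt ];
}
```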
119 changes: 119 additions & 0 deletions pkgs/development/python-modules/private-gpt/default.nix
@@ -0,0 +1,119 @@
{ lib
, buildPythonPackage
, python
, fetchFromGitHub
, poetry-core
, fastapi
, injector
, llama-index-core
, llama-index-readers-file
, huggingface-hub
, python-multipart
, pyyaml
, transformers
, uvicorn
, watchdog
, gradio
, fetchurl
, fetchpatch
}:

buildPythonPackage rec {
pname = "private-gpt";
version = "0.5.0";
pyproject = true;

src = fetchFromGitHub {
owner = "zylon-ai";
repo = "private-gpt";
rev = "v${version}";
hash = "sha256-bjydzJhOJjmbflcJbuMyNsmby7HtNPFW3MY2Tw12cHw=";
};

patches = [
# Fix a vulnerability, to be removed in the next bump version
# See https://github.com/zylon-ai/private-gpt/pull/1890
(fetchpatch {
url = "https://github.com/zylon-ai/private-gpt/commit/86368c61760c9cee5d977131d23ad2a3e063cbe9.patch";
hash = "sha256-4ysRUuNaHW4bmNzg4fn++89b430LP6AzYDoX2HplVH0=";
})
];

build-system = [
poetry-core
];

dependencies = [
fastapi
injector
llama-index-core
llama-index-readers-file
python-multipart
pyyaml
transformers
uvicorn
watchdog
] ++ lib.flatten (builtins.attrValues passthru.optional-dependencies);

# This is needed for running the tests and the service in offline mode,
# See related issue at https://github.com/zylon-ai/private-gpt/issues/1870
passthru.cl100k_base.tiktoken = fetchurl {
url = "https://openaipublic.blob.core.windows.net/encodings/cl100k_base.tiktoken";
hash = "sha256-Ijkht27pm96ZW3/3OFE+7xAPtR0YyTWXoRO8/+hlsqc=";
};

passthru.optional-dependencies = with python.pkgs; {
embeddings-huggingface = [
huggingface-hub
llama-index-embeddings-huggingface
];
embeddings-ollama = [
llama-index-embeddings-ollama
];
embeddings-openai = [
llama-index-embeddings-openai
];
embeddings-sagemaker = [
boto3
];
llms-ollama = [
llama-index-llms-ollama
];
llms-openai = [
llama-index-llms-openai
];
llms-openai-like = [
llama-index-llms-openai-like
];
llms-sagemaker = [
boto3
];
ui = [
gradio
];
vector-stores-chroma = [
llama-index-vector-stores-chroma
];
vector-stores-postgres = [
llama-index-vector-stores-postgres
];
vector-stores-qdrant = [
llama-index-vector-stores-qdrant
];
};

postInstall = ''
cp settings*.yaml $out/${python.sitePackages}/private_gpt/
'';

pythonImportsCheck = [ "private_gpt" ];

meta = {
changelog = "https://github.com/zylon-ai/private-gpt/blob/${src.rev}/CHANGELOG.md";
description = "Interact with your documents using the power of GPT, 100% privately, no data leaks";
homepage = "https://github.com/zylon-ai/private-gpt";
license = lib.licenses.asl20;
mainProgram = "private-gpt";
maintainers = with lib.maintainers; [ drupol ];
};
}
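One detail worth noting about the expression above: every `passthru.optional-dependencies` group is flattened into `dependencies`, so all backends are included by default. The groups remain addressable individually, as in this hedged sketch (the chosen groups are arbitrary examples, and the expression assumes an evaluation context where `python3` is in scope):

```nix
# Sketch: referencing individual optional-dependency groups via passthru.
# They are already part of `dependencies`, so this mainly matters when
# composing a separate Python environment around private-gpt.
python3.withPackages (ps:
  [ ps.private-gpt ]
  ++ ps.private-gpt.optional-dependencies.llms-ollama
  ++ ps.private-gpt.optional-dependencies.embeddings-ollama)
```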
2 changes: 2 additions & 0 deletions pkgs/top-level/python-packages.nix
@@ -9891,6 +9891,8 @@ self: super: with self; {

ppdeep = callPackage ../development/python-modules/ppdeep { };

private-gpt = callPackage ../development/python-modules/private-gpt { };

prodict = callPackage ../development/python-modules/prodict { };

prometheus-pandas = callPackage ../development/python-modules/prometheus-pandas { };