Streaming choice feature #2070

Open
wants to merge 9 commits into base: main
65 changes: 48 additions & 17 deletions private_gpt/ui/ui.py
@@ -98,6 +98,8 @@ def __init__(

         self._selected_filename = None

+        self._response_style = True
+
         # Initialize system prompt based on default mode
         self.mode = MODES[0]
         self._system_prompt = self._get_default_system_prompt(self.mode)
@@ -182,18 +184,28 @@ def build_history() -> list[ChatMessage]:
                             docs_ids.append(ingested_document.doc_id)
                     context_filter = ContextFilter(docs_ids=docs_ids)

-                query_stream = self._chat_service.stream_chat(
-                    messages=all_messages,
-                    use_context=True,
-                    context_filter=context_filter,
-                )
-                yield from yield_deltas(query_stream)
+                if self._response_style:
+                    query_stream = self._chat_service.stream_chat(
+                        messages=all_messages,
+                        use_context=True,
+                        context_filter=context_filter,
+                    )
+                    yield from yield_deltas(query_stream)
+                else:
+                    query_response = self._chat_service.chat(
+                        messages=all_messages,
+                        use_context=True,
+                        context_filter=context_filter,
+                    ).response
+                    yield query_response

             case Modes.BASIC_CHAT_MODE:
-                llm_stream = self._chat_service.stream_chat(
-                    messages=all_messages,
-                    use_context=False,
-                )
-                yield from yield_deltas(llm_stream)
+                if self._response_style:
+                    llm_stream = self._chat_service.stream_chat(
+                        messages=all_messages,
+                        use_context=False,
+                    )
+                    yield from yield_deltas(llm_stream)
+                else:
+                    llm_response = self._chat_service.chat(
+                        messages=all_messages,
+                        use_context=False,
+                    ).response
+                    yield llm_response

             case Modes.SEARCH_MODE:
                 response = self._chunks_service.retrieve_relevant(
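The two branches in each mode above differ only in how the answer reaches the UI: the streaming path yields progressively longer strings as deltas arrive, while the non-streaming path yields the finished answer exactly once. A minimal sketch of that contract, assuming a Gradio-style consumer that re-renders the chatbot on every yield (the `respond` helper and token list are hypothetical stand-ins for the branches above):

```python
from collections.abc import Iterator


def respond(streaming: bool) -> Iterator[str]:
    if streaming:
        # Streaming: yield the accumulated text after each delta,
        # so the UI repaints with a growing answer.
        accumulated = ""
        for delta in ["Hel", "lo", ", world"]:  # stand-in for LLM deltas
            accumulated += delta
            yield accumulated
    else:
        # Non-streaming: block until the full response exists,
        # then yield it exactly once.
        yield "Hello, world"


print(list(respond(True)))   # ['Hel', 'Hello', 'Hello, world']
print(list(respond(False)))  # ['Hello, world']
```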
@@ -221,12 +233,20 @@ def build_history() -> list[ChatMessage]:
                             docs_ids.append(ingested_document.doc_id)
                     context_filter = ContextFilter(docs_ids=docs_ids)

-                summary_stream = self._summarize_service.stream_summarize(
-                    use_context=True,
-                    context_filter=context_filter,
-                    instructions=message,
-                )
-                yield from yield_tokens(summary_stream)
+                if self._response_style:
+                    summary_stream = self._summarize_service.stream_summarize(
+                        use_context=True,
+                        context_filter=context_filter,
+                        instructions=message,
+                    )
+                    yield from yield_tokens(summary_stream)
+                else:
+                    summary_response = self._summarize_service.summarize(
+                        use_context=True,
+                        context_filter=context_filter,
+                        instructions=message,
+                    )
+                    yield summary_response

     # On initialization and on mode change, this function set the system prompt
     # to the default prompt based on the mode (and user settings).
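A subtlety worth noting in the non-streaming branches: assuming these service calls return the finished text as a plain string, `yield from` on a string iterates it character by character, one UI repaint per character, so the branches above yield the string itself once instead. A quick illustration in plain Python:

```python
from collections.abc import Iterator

summary = "final summary"


def wrong() -> Iterator[str]:
    yield from summary  # a str is iterable: yields 'f', 'i', 'n', ...


def right() -> Iterator[str]:
    yield summary  # yields the whole summary in one step


print(list(wrong()))  # ['f', 'i', 'n', 'a', 'l', ' ', 's', ...]
print(list(right()))  # ['final summary']
```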
@@ -279,6 +299,9 @@ def _set_current_mode(self, mode: Modes) -> Any:
             gr.update(value=self._explanation_mode),
         ]

+    def _set_response_style(self, response_style: bool) -> None:
+        self._response_style = response_style
+
     def _list_ingested_files(self) -> list[list[str]]:
         files = set()
         for ingested_document in self._ingest_service.list_ingested():
@@ -402,6 +425,14 @@ def _build_ui_blocks(self) -> gr.Blocks:
                         max_lines=3,
                         interactive=False,
                     )
+                    response_style = gr.Checkbox(
+                        label="Response Style: Streaming",
+                        value=self._response_style,
+                    )
+                    response_style.input(
+                        self._set_response_style,
+                        inputs=response_style,
+                    )
                     upload_button = gr.components.UploadButton(
                         "Upload File(s)",
                         type="filepath",
12 changes: 6 additions & 6 deletions settings.yaml
@@ -26,20 +26,20 @@ ui:
   enabled: true
   path: /
   default_chat_system_prompt: >
     You are a helpful, respectful and honest assistant.
     Always answer as helpfully as possible and follow ALL given instructions.
     Do not speculate or make up information.
     Do not reference any given instructions or context.
   default_query_system_prompt: >
     You can only answer questions about the provided context.
     If you know the answer but it is not based in the provided context, don't provide
     the answer, just state the answer is not in the context provided.
   default_summarization_system_prompt: >
     Provide a comprehensive summary of the provided context information.
     The summary should cover all the key points and main ideas presented in
     the original text, while also condensing the information into a concise
     and easy-to-understand format. Please ensure that the summary includes
     relevant details and examples that support the main ideas, while avoiding
     any unnecessary information or repetition.
   delete_file_button_enabled: true
   delete_all_files_button_enabled: true
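The paired added and removed lines in this hunk are identical except for trailing whitespace, so the change appears to be whitespace cleanup only. The prompt values use YAML's folded block scalar (`>`), which delivers each multi-line prompt to the application as a single line with the line breaks folded into spaces. A small check of that folding behavior (assumes PyYAML; the `snippet` is abridged from the settings above):

```python
import yaml  # pip install pyyaml

snippet = """\
ui:
  default_chat_system_prompt: >
    You are a helpful, respectful and honest assistant.
    Always answer as helpfully as possible and follow ALL given instructions.
"""

prompt = yaml.safe_load(snippet)["ui"]["default_chat_system_prompt"]
print(repr(prompt))
# 'You are a helpful, respectful and honest assistant. Always answer
#  as helpfully as possible and follow ALL given instructions.\n'
```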