Combined Persona and Prompt API (#3690)
* Combined Persona and Prompt API

* quality

* added tests

* consolidated models and got rid of redundant fields

* tenant appreciation day

* reverted default
hagen-danswer authored Jan 17, 2025
1 parent 880c42a commit 1ad2128
Showing 22 changed files with 605 additions and 740 deletions.
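The commit title and the "consolidated models" note point to a single request model that carries both persona and prompt data. As orientation, a minimal sketch of what such a model could look like; every field below is an assumption for illustration, since the diff only shows CreatePersonaRequest being replaced by PersonaUpsertRequest:

    from pydantic import BaseModel


    class PersonaUpsertRequest(BaseModel):
        # Hypothetical fields: one model covering both the persona and its
        # prompt, per the "Combined Persona and Prompt API" commit title.
        name: str
        description: str
        system_prompt: str  # assumed: prompt fields folded into the persona request
        task_prompt: str
        prompt_ids: list[int] | None = None  # assumed: existing prompts may be reused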
6 changes: 3 additions & 3 deletions backend/ee/onyx/server/seeding.py
@@ -24,7 +24,7 @@
 from onyx.db.llm import upsert_llm_provider
 from onyx.db.models import Tool
 from onyx.db.persona import upsert_persona
-from onyx.server.features.persona.models import CreatePersonaRequest
+from onyx.server.features.persona.models import PersonaUpsertRequest
 from onyx.server.manage.llm.models import LLMProviderUpsertRequest
 from onyx.server.settings.models import Settings
 from onyx.server.settings.store import store_settings as store_base_settings
@@ -57,7 +57,7 @@ class SeedConfiguration(BaseModel):
     llms: list[LLMProviderUpsertRequest] | None = None
     admin_user_emails: list[str] | None = None
     seeded_logo_path: str | None = None
-    personas: list[CreatePersonaRequest] | None = None
+    personas: list[PersonaUpsertRequest] | None = None
     settings: Settings | None = None
     enterprise_settings: EnterpriseSettings | None = None

@@ -128,7 +128,7 @@ def _seed_llms(
         )


-def _seed_personas(db_session: Session, personas: list[CreatePersonaRequest]) -> None:
+def _seed_personas(db_session: Session, personas: list[PersonaUpsertRequest]) -> None:
     if personas:
         logger.notice("Seeding Personas")
         for persona in personas:
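A hedged usage sketch for the seeding path above; only the PersonaUpsertRequest type and the _seed_personas signature come from this diff, while the field values and engine are placeholders:

    from ee.onyx.server.seeding import _seed_personas  # private helper, shown above
    from onyx.server.features.persona.models import PersonaUpsertRequest
    from sqlalchemy.orm import Session

    # Hypothetical seed data; the real field names live on PersonaUpsertRequest.
    personas = [PersonaUpsertRequest(name="Support", description="Helpdesk persona")]

    with Session(engine) as db_session:  # assumes a pre-built SQLAlchemy engine
        _seed_personas(db_session, personas)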
2 changes: 1 addition & 1 deletion backend/onyx/chat/chat_utils.py
@@ -25,7 +25,7 @@
 from onyx.db.models import Prompt
 from onyx.db.models import Tool
 from onyx.db.models import User
-from onyx.db.persona import get_prompts_by_ids
+from onyx.db.prompts import get_prompts_by_ids
 from onyx.llm.models import PreviousMessage
 from onyx.natural_language_processing.utils import BaseTokenizer
 from onyx.server.query_and_chat.models import CreateChatMessageRequest
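This hunk only moves the import: prompt lookups now live in onyx.db.prompts rather than onyx.db.persona. A call-site sketch, with keyword names assumed since the function signature is not part of this diff:

    from onyx.db.prompts import get_prompts_by_ids  # moved out of onyx.db.persona

    # Assumed signature: a list of prompt IDs plus an open session.
    prompts = get_prompts_by_ids(prompt_ids=[1, 2], db_session=db_session)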
6 changes: 4 additions & 2 deletions backend/onyx/chat/prompt_builder/citations_prompt.py
@@ -1,12 +1,13 @@
 from langchain.schema.messages import HumanMessage
 from langchain.schema.messages import SystemMessage
+from sqlalchemy.orm import Session

 from onyx.chat.models import LlmDoc
 from onyx.chat.models import PromptConfig
 from onyx.configs.model_configs import GEN_AI_SINGLE_USER_MESSAGE_EXPECTED_MAX_TOKENS
 from onyx.context.search.models import InferenceChunk
 from onyx.db.models import Persona
-from onyx.db.persona import get_default_prompt__read_only
+from onyx.db.prompts import get_default_prompt
 from onyx.db.search_settings import get_multilingual_expansion
 from onyx.llm.factory import get_llms_for_persona
 from onyx.llm.factory import get_main_llm_from_tuple
@@ -97,11 +98,12 @@ def compute_max_document_tokens(


 def compute_max_document_tokens_for_persona(
+    db_session: Session,
     persona: Persona,
     actual_user_input: str | None = None,
     max_llm_token_override: int | None = None,
 ) -> int:
-    prompt = persona.prompts[0] if persona.prompts else get_default_prompt__read_only()
+    prompt = persona.prompts[0] if persona.prompts else get_default_prompt(db_session)
     return compute_max_document_tokens(
         prompt_config=PromptConfig.from_model(prompt),
         llm_config=get_main_llm_from_tuple(get_llms_for_persona(persona)).config,
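Because compute_max_document_tokens_for_persona now takes the session as its first parameter, callers must open and pass one explicitly. A minimal sketch, assuming a configured SQLAlchemy engine and an already-loaded persona and user query:

    from onyx.chat.prompt_builder.citations_prompt import (
        compute_max_document_tokens_for_persona,
    )
    from sqlalchemy.orm import Session

    with Session(engine) as db_session:  # engine, persona, user_query are placeholders
        max_doc_tokens = compute_max_document_tokens_for_persona(
            db_session=db_session,
            persona=persona,
            actual_user_input=user_query,
        )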
20 changes: 0 additions & 20 deletions backend/onyx/chat/prompt_builder/utils.py
@@ -7,26 +7,6 @@
 from onyx.file_store.models import InMemoryChatFile
 from onyx.llm.models import PreviousMessage
 from onyx.llm.utils import build_content_with_imgs
-from onyx.prompts.direct_qa_prompts import PARAMATERIZED_PROMPT
-from onyx.prompts.direct_qa_prompts import PARAMATERIZED_PROMPT_WITHOUT_CONTEXT
-
-
-def build_dummy_prompt(
-    system_prompt: str, task_prompt: str, retrieval_disabled: bool
-) -> str:
-    if retrieval_disabled:
-        return PARAMATERIZED_PROMPT_WITHOUT_CONTEXT.format(
-            user_query="<USER_QUERY>",
-            system_prompt=system_prompt,
-            task_prompt=task_prompt,
-        ).strip()
-
-    return PARAMATERIZED_PROMPT.format(
-        context_docs_str="<CONTEXT_DOCS>",
-        user_query="<USER_QUERY>",
-        system_prompt=system_prompt,
-        task_prompt=task_prompt,
-    ).strip()


 def translate_onyx_msg_to_langchain(