Commit 6879e39

feat: Add support for Azure OpenAI embeddings and model deployment
This commit adds support for specifying the deployment names for the Azure OpenAI embeddings model and chat model in the configuration file. If the corresponding host is set to Azure-OpenAI, the user is prompted to enter a deployment name for the embeddings and for the model. The entered deployment names are stored in the config dictionary as "embeddings-deployment" and "model-deployment", respectively.
fynnfluegge committed Sep 25, 2023
1 parent 6b4b4f1 commit 6879e39
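
For illustration only (not part of the commit), here is a minimal sketch of the config dictionary that create_config() produces when both the embeddings host and the LLM host are set to Azure-OpenAI. The keys are the ones referenced in the diff below; all values are hypothetical placeholders:

    # Sketch of the resulting config dict (values are placeholders, not from the commit).
    config = {
        "embeddings": "Azure-OpenAI",
        "llm-host": "Azure-OpenAI",
        "chat-model": "gpt-3.5-turbo",                        # placeholder
        "embeddings-deployment": "my-embeddings-deployment",  # entered at the prompt
        "model-deployment": "my-chat-deployment",             # entered at the prompt
    }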
Showing 2 changed files with 19 additions and 8 deletions.
codeqai/app.py (6 changes: 4 additions, 2 deletions)
@@ -44,7 +44,9 @@ def run():
     embeddings_model = Embeddings(
         local=True,
         model=EmbeddingsModel[config["embeddings"].upper().replace("-", "_")],
-        deployment=config["deployment"] if "deployment" in config else None,
+        deployment=config["embeddings-deployment"]
+        if "embeddings-deployment" in config
+        else None,
     )
 
     # check if faiss.index exists
@@ -71,7 +73,7 @@ def run():
     llm = LLM(
         llm_host=LlmHost[config["llm-host"].upper().replace("-", "_")],
         chat_model=config["chat-model"],
-        deployment=config["deployment"] if "deployment" in config else None,
+        deployment=config["model-deployment"] if "model-deployment" in config else None,
     )
     memory = ConversationSummaryMemory(
         llm=llm.chat_model, memory_key="chat_history", return_messages=True
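
Side note (not part of the commit): both deployment lookups use the explicit "key" in config conditional. A slightly more compact, functionally equivalent sketch would use dict.get, which returns None when the key is missing:

    # Illustrative alternative only: dict.get yields None for absent keys,
    # matching the explicit conditional used in the commit.
    embeddings_model = Embeddings(
        local=True,
        model=EmbeddingsModel[config["embeddings"].upper().replace("-", "_")],
        deployment=config.get("embeddings-deployment"),
    )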
codeqai/config.py (21 changes: 15 additions, 6 deletions)
@@ -115,20 +115,29 @@ def create_config():
         "llm-host": answers["llm-host"],
     }
 
-    if (
-        answers["embeddings"] == "Azure-OpenAI"
-        or answers["llm-host"] == "Azure-OpenAI"
-    ):
+    if answers["embeddings"] == "Azure-OpenAI":
         questions = [
             inquirer.Text(
                 "deployment",
-                message="Please enter the Azure OpenAI deployment name.",
+                message="Please enter the Azure OpenAI embeddings deployment name.",
                 default="",
             ),
         ]
         deployment_answer = inquirer.prompt(questions)
         if deployment_answer and deployment_answer["deployment"]:
-            config["deployment"] = deployment_answer["deployment"]
+            config["embeddings-deployment"] = deployment_answer["deployment"]
+
+    if answers["llm-host"] == "Azure-OpenAI":
+        questions = [
+            inquirer.Text(
+                "deployment",
+                message="Please enter the Azure OpenAI model deployment name.",
+                default="",
+            ),
+        ]
+        deployment_answer = inquirer.prompt(questions)
+        if deployment_answer and deployment_answer["deployment"]:
+            config["model-deployment"] = deployment_answer["deployment"]
 
     if answers["llm-host"] == "Llamacpp":
         questions = [
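
The two Azure-OpenAI prompt blocks added in config.py are nearly identical. Purely as an illustration (not part of the commit), they could be factored into a small helper; the name prompt_deployment_name below is hypothetical:

    import inquirer

    def prompt_deployment_name(config: dict, config_key: str, prompt_message: str) -> None:
        # Hypothetical refactoring sketch, not in this commit: ask for an Azure OpenAI
        # deployment name and store it in the config dict under config_key.
        questions = [
            inquirer.Text("deployment", message=prompt_message, default=""),
        ]
        deployment_answer = inquirer.prompt(questions)
        if deployment_answer and deployment_answer["deployment"]:
            config[config_key] = deployment_answer["deployment"]

Usage would mirror the two branches above:

    if answers["embeddings"] == "Azure-OpenAI":
        prompt_deployment_name(config, "embeddings-deployment", "Please enter the Azure OpenAI embeddings deployment name.")
    if answers["llm-host"] == "Azure-OpenAI":
        prompt_deployment_name(config, "model-deployment", "Please enter the Azure OpenAI model deployment name.")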
