From 537527fba0fad86a767d9d34904086518d0cdf93 Mon Sep 17 00:00:00 2001 From: robinrolle Date: Sun, 13 Apr 2025 01:13:20 +0200 Subject: [PATCH] refactor using openai --- requirements.txt | 1 + services/advisor.py | 4 ++-- services/extractor.py | 8 ++++---- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index df31072..20afd0f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -40,6 +40,7 @@ Flask==3.1.0 annotated-types==0.7.0 blinker==1.9.0 langchain-google-genai==2.1.2 +langchain-openai numpy==2.2.4 pymupdf == 1.25.5 flask-cors==5.0.1 diff --git a/services/advisor.py b/services/advisor.py index 1f26f92..ebbc0ec 100644 --- a/services/advisor.py +++ b/services/advisor.py @@ -7,7 +7,7 @@ from dto.requests import GameStartRequestDTO, GameDecisionRequestDTO from services.extractor import extract_profile, extract_passport, extract_description, extract_account from services.julius_baer_api_client import JuliusBaerApiClient from utils.storage.game_files_manager import store_game_round_data -from langchain_google_genai import ChatGoogleGenerativeAI +from langchain_openai.chat_models import ChatOpenAI from validation.llm_validate import AdvisorDecision from langchain_core.prompts import ChatPromptTemplate from langchain_core.output_parsers import PydanticOutputParser @@ -129,7 +129,7 @@ class Advisor: ) # 4. Chaîne LLM - chain = prompt | ChatGoogleGenerativeAI(model="gemini-2.0-flash") | parser + chain = prompt | ChatOpenAI(model="gpt-4o-mini") | parser # 5. 
Invocation result: AdvisorDecision = chain.invoke({ diff --git a/services/extractor.py b/services/extractor.py index 5ba52d2..e80855e 100644 --- a/services/extractor.py +++ b/services/extractor.py @@ -4,7 +4,7 @@ from typing import Callable, Type, Any, TypeVar from langchain_core.runnables import Runnable from langchain_core.prompts import ChatPromptTemplate from langchain_core.output_parsers import PydanticOutputParser -from langchain_google_genai import ChatGoogleGenerativeAI +from langchain_openai.chat_models import ChatOpenAI from pydantic import BaseModel from utils.parsers import process_profile, process_passport, process_account @@ -73,7 +73,7 @@ def extract_passport(client_data: dict[str, Any]) -> FromPassport: def extract_profile(client_data: dict[str, Any]) -> FromProfile: - passport_data = client_data.get("profile") + profile_data = client_data.get("profile") prompt_template = ( "Extract the following information from the provided text.\n" @@ -83,7 +83,7 @@ def extract_profile(client_data: dict[str, Any]) -> FromProfile: ) result = __run_extraction_chain( - raw_file_data=passport_data, + raw_file_data=profile_data, file_processor=process_profile, pydantic_model=FromProfile, prompt_template=prompt_template, @@ -125,7 +125,7 @@ def __run_extraction_chain( prompt = ChatPromptTemplate.from_template(prompt_template) - chain: Runnable = prompt | ChatGoogleGenerativeAI(model=model_name) | parser + chain: Runnable = prompt | ChatOpenAI(model="gpt-4o-mini") | parser result = chain.invoke({ "processed_text": processed_text,