add structured output llm call

This commit is contained in:
robinrolle
2025-04-12 13:07:17 +02:00
parent c0a04c5282
commit 5818c9913f
3 changed files with 40 additions and 2 deletions

View File

@ -1,3 +1,4 @@
API_URI= API_URI=
API_KEY= API_KEY=
API_TEAM= API_TEAM=
GROQ_API_KEY=  # SECURITY: real key redacted — the committed key is permanently exposed in git history; rotate it immediately and load it from the environment, never from version control

View File

@ -14,4 +14,6 @@ pypdfium2==4.30.1
pytesseract==0.3.13 pytesseract==0.3.13
requests==2.32.3 requests==2.32.3
urllib3==2.4.0 urllib3==2.4.0
pydantic==2.11.3 pydantic==2.11.3
langchain==0.3.23
langchain-groq==0.3.2

View File

@ -0,0 +1,35 @@
from langchain_core.runnables import Runnable
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import PydanticOutputParser
from langchain_groq import ChatGroq
from pydantic import BaseModel, Field
# Step 1: Define the structured output
class CountryAnswer(BaseModel):
    """Structured answer expected from the LLM.

    The parser built from this model turns the model's JSON reply into a
    validated object. The field descriptions are injected verbatim into the
    prompt via the parser's format instructions, which is why they are in
    French — do not translate them.
    """

    # French runtime strings: part of the prompt contract, keep as-is.
    answer: str = Field(..., description="La réponse à la question")
    country: str = Field(..., description="Le pays concerné")
# Step 2: Create the output parser that converts the model's JSON reply
# into a validated CountryAnswer instance.
parser = PydanticOutputParser(pydantic_object=CountryAnswer)

# Step 3: Create the prompt. The {format_instructions} slot receives the
# parser's JSON-schema instructions; the French text is part of the prompt
# contract and must stay byte-identical.
prompt = ChatPromptTemplate.from_template(
    "Tu es un assistant utile. Réponds à la question : {question}\n"
    "Réponds uniquement en JSON avec ce format :\n{format_instructions}"
)

# Step 4: LLM configuration. `model_name` is a deprecated alias in
# langchain-groq — `model` is the current parameter. The GROQ_API_KEY is
# read from the environment by ChatGroq.
llm = ChatGroq(model="llama3-70b-8192", temperature=0.7)

# Step 5: Combine everything into a prompt -> LLM -> parser pipeline.
chain: Runnable = prompt | llm | parser


def ask(question: str) -> CountryAnswer:
    """Run the chain on *question* and return the parsed structured answer.

    Raises whatever the underlying LLM client or output parser raises
    (e.g. on network failure or malformed JSON from the model).
    """
    return chain.invoke({
        "question": question,
        "format_instructions": parser.get_format_instructions(),
    })


if __name__ == "__main__":
    # Step 6: Run the chain. Guarded so that importing this module does not
    # trigger a billable API call as a side effect.
    print(ask("Quelle est la capitale de la Suisse ?"))