In my LangServe API, I created the route below, e.g. http://localhost:8000/get_gpt4/invoke:
from fastapi import FastAPI
from langserve import add_routes

from xyz import PredictCustomOutput

app = FastAPI(
    title=" API",
    version="1.0",
    description="Helper APIs to augment language models",
)

add_routes(
    app,
    PredictCustomOutput(),
    path="/get_gpt4",
)
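I call the endpoint from Postman with a POST request to http://localhost:8000/get_gpt4/invoke. As far as I understand, LangServe's invoke endpoint expects the chain input under an "input" key, so the request body looks roughly like this (the example text is just a placeholder):

{
  "input": {
    "text": "some text to classify"
  }
}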
In xyz.py:
from langchain_core.output_parsers import JsonOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.runnables import Runnable


class Classify(BaseModel):
    Prediction: str = Field(description="predict text")
    Explanation: str = Field(description="predicted explanation text")


def PredictCustomOutput() -> Runnable:
    """Return the classification chain: prompt | model | JSON output parser."""
    # get_ChatOpenAI and get_prompt_template are our own internal helpers
    model = get_ChatOpenAI(model_name="azure-openai")
    template = get_prompt_template(name="gpt4_ae_classify_finetune")
    parser = JsonOutputParser(pydantic_object=Classify)
    prompt = PromptTemplate(
        template=template + "\n{format_instructions}\n{text}\n",
        input_variables=["text"],
        partial_variables={"format_instructions": parser.get_format_instructions()},
    )
    chain = prompt | model | parser
    return chain
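Invoking the chain directly, for example in a notebook, already returns the plain parsed dict (the input string below is just a placeholder):

chain = PredictCustomOutput()
result = chain.invoke({"text": "some text to classify"})
# result is a plain dict such as {"Prediction": "Event", "Explanation": "..."}
print(result)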
When invoking the API from Postman or the FastAPI docs page, I expected to receive the following response:
{
  "Prediction": "Event",
  "Explanation": "description of predict"
}
but instead, I'm getting the response below:
{
  "output": {
    "Prediction": "Event",
    "Explanation": "description of predict"
  },
  "callback_events": [],
  "metadata": {
    "run_id": "14f4a9a8-f3a8-4773-a29a-6d445c4a1ac8"
  }
}
The output parser works as expected when I run the chain in a notebook, but I want the API response in this format:
{
  "Prediction": "Event",
  "Explanation": "description of predict"
}
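If LangServe has no option to drop that wrapper, a fallback I'm considering is adding a plain FastAPI endpoint alongside add_routes that invokes the chain and returns only the parsed result. This is only a sketch; the /get_gpt4/predict path and the request model are my own naming, not anything provided by LangServe:

from fastapi import FastAPI
from pydantic import BaseModel

from xyz import PredictCustomOutput

app = FastAPI()
chain = PredictCustomOutput()

class PredictRequest(BaseModel):
    text: str

@app.post("/get_gpt4/predict")
async def predict(req: PredictRequest) -> dict:
    # Runnable.ainvoke returns the JsonOutputParser result directly,
    # without the output/callback_events/metadata wrapper that
    # LangServe's /invoke endpoint adds.
    return await chain.ainvoke({"text": req.text})

But I would prefer a LangServe-native way to make /get_gpt4/invoke return only the parsed output.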