import base64
from typing import Any, Dict, List, Tuple
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langchain_community.chat_models import ChatOllama
from langchain.document_loaders.blob_loaders import Blob
from langchain.document_loaders.parsers.pdf import PDFMinerParser
from langchain.pydantic_v1 import BaseModel, Field
from langchain.schema.messages import AIMessage, BaseMessage, HumanMessage
from langchain.schema.runnable import RunnableLambda
from langchain_core.runnables import RunnableParallel
from langserve import CustomUserType
from langserve.server import add_routes
app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple API server using LangChain's Runnable interfaces",
)
# Allow cross-origin requests from any host so browser-based clients
# (e.g. the LangServe playground) can reach the API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
)
class ChatHistory(CustomUserType):
    """Playground chat widget input: prior (human, ai) turns plus a new question."""

    chat_history: List[Tuple[str, str]] = Field(
        ...,
        examples=[[("a", "aa")]],
        extra={"widget": {"type": "chat", "input": "question", "output": "answer"}},
    )
    question: str
def _format_to_messages(input: ChatHistory) -> List[BaseMessage]:
    """Format the input to a list of messages."""
    history = input.chat_history
    user_input = input.question

    messages = []

    # Replay each prior (human, ai) turn, then append the new question.
    for human, ai in history:
        messages.append(HumanMessage(content=human))
        messages.append(AIMessage(content=ai))
    messages.append(HumanMessage(content=user_input))
    return messages
model = ChatOllama(model="llama2") chat_model = RunnableParallel({"answer": (RunnableLambda(_format_to_messages) | model)}) add_routes( app, chat_model.with_types(input_type=ChatHistory), config_keys=["configurable"], path="/chat", )
class FileProcessingRequest(BaseModel):
    """A base64-encoded PDF upload plus a character limit for the extract."""

    file: bytes = Field(..., extra={"widget": {"type": "base64file"}})
    num_chars: int = 100
def process_file(input: Dict[str, Any]) -> str:
    """Extract the text from the first page of the PDF."""
    content = base64.decodebytes(input["file"])
    blob = Blob(data=content)
    documents = list(PDFMinerParser().lazy_parse(blob))
    # Take the first parsed document's text and truncate it.
    content = documents[0].page_content
    return content[: input["num_chars"]]
add_routes(
    app,
    RunnableLambda(process_file).with_types(input_type=FileProcessingRequest),
    config_keys=["configurable"],
    path="/pdf",
)
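# A sketch of calling /pdf over plain HTTP ("example.pdf" is a placeholder
# path; LangServe exposes each route's invoke endpoint as POST <path>/invoke
# with the payload wrapped in an "input" key):
#
#   import base64
#   import requests
#
#   with open("example.pdf", "rb") as f:
#       encoded = base64.b64encode(f.read()).decode("utf-8")
#   response = requests.post(
#       "http://localhost:8000/pdf/invoke",
#       json={"input": {"file": encoded, "num_chars": 100}},
#   )
#   print(response.json()["output"])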
# Also expose the raw Ollama model directly, without any prompt formatting.
add_routes(
    app,
    model,
    path="/ollama",
)
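# A streaming sketch against the raw model endpoint (assumes a local
# Ollama instance with the llama2 model pulled):
#
#   from langserve import RemoteRunnable
#
#   ollama = RemoteRunnable("http://localhost:8000/ollama")
#   for chunk in ollama.stream("Tell me a joke"):
#       print(chunk.content, end="", flush=True)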
if __name__ == "__main__": import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)