I'd be happy to help you build a chatbot UI for your Python file. To get started, could you please share your main .py file or provide more details about its functionality? Once I have that information, I can guide you through creating a user-friendly chatbot interface using the AI SDK and a suitable web framework like Next.js.
Here's what we can do:
Could you share your main .py file or describe its main features so we can begin?
# main.py — FastAPI entry point wiring the chat, PDF-upload, and query routes.
from fastapi import FastAPI
from chat_api import create_new_chat, get_chat
from db_utils import upload_pdf, process_query
from fastapi.middleware.cors import CORSMiddleware
from global_rag import GLOBAL_THREAD_ID, add_to_global_knowledge, query_global_knowledge

app = FastAPI()

# CORS: let the deployed frontend call this API from the browser.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["https://v0-chatbot-test-mgcc7x7nszd-j247f9tl9-omarqudas-projects.vercel.app"],  # Your frontend URL
    allow_credentials=True,
    allow_methods=["*"],  # Allow all HTTP methods ("*" was lost to markdown mangling)
    allow_headers=["*"],  # Allow all headers
)

# Chat lifecycle endpoints (create a thread, fetch its stored history).
app.add_api_route("/new-chat/", create_new_chat, methods=["POST"])
app.add_api_route("/chat/{thread_id}", get_chat, methods=["GET"])

# Knowledge endpoints (ingest a PDF, answer a user query).
app.add_api_route("/upload-pdf/", upload_pdf, methods=["POST"])
app.add_api_route("/query/", process_query, methods=["POST"])

# Fixed: the original `if name == "main"` had the dunders stripped by
# markdown emphasis; it would raise NameError and never start the server.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8001)
A very simple chatbot with the ability to add new chats. Creating a chat calls new_chat, which inserts into MongoDB; the conversation from each chat is saved in the UI and can display the data returned by the get_chat function, which retrieves it from MongoDB. upload_pdf processes PDFs uploaded to the chat, and the query function receives the user's prompt.
Thank you for sharing your main.py file. I understand that you have a FastAPI backend with functionality for creating new chats, retrieving chat history, uploading PDFs, and processing queries. Let's create a chatbot UI that integrates with your existing backend using the AI SDK and Next.js.
Here's how we can build a chatbot UI for your application:
npx create-next-app@latest chatbot-ui
cd chatbot-ui
npm install ai @ai-sdk/openai @vercel/kv
app/page.tsx
'use client'

import { useState } from 'react'
import { useChat, type Message } from 'ai/react'
import { Button } from "@/components/ui/button"
import { Input } from "@/components/ui/input"
import { Card, CardContent, CardFooter, CardHeader, CardTitle } from "@/components/ui/card"

/**
 * Chat page: a single-thread chatbot UI backed by the FastAPI server.
 * Supports creating a chat thread, reloading its stored history,
 * uploading a PDF, and sending queries to the backend's /query/ endpoint.
 */
export default function Chat() {
  const [threadId, setThreadId] = useState<string | null>(null)
  // useChat manages the message list and the controlled text input.
  const { messages, input, setInput, handleInputChange, setMessages } = useChat()
  const [pdfFile, setPdfFile] = useState<File | null>(null)

  // Ask the backend for a fresh thread and reset the visible conversation.
  const createNewChat = async () => {
    const response = await fetch('http://localhost:8001/new-chat/', { method: 'POST' })
    const data = await response.json()
    setThreadId(data.thread_id)
    setMessages([])
  }

  // Reload the stored history for the current thread from the backend.
  const loadChat = async () => {
    if (!threadId) return
    const response = await fetch(`http://localhost:8001/chat/${threadId}`)
    const data = await response.json()
    // Backend messages may lack ids; synthesize stable ones so they satisfy
    // the AI SDK Message type and can serve as React keys.
    setMessages(
      data.messages.map((m: any, i: number): Message => ({
        id: m.id ?? String(i),
        role: m.role,
        content: m.content,
      })),
    )
  }

  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    if (e.target.files) {
      setPdfFile(e.target.files[0])
    }
  }

  // Send the selected PDF to the backend for processing.
  const uploadPdf = async () => {
    if (!pdfFile) return
    const formData = new FormData()
    formData.append('file', pdfFile)
    await fetch('http://localhost:8001/upload-pdf/', {
      method: 'POST',
      body: formData,
    })
    alert('PDF uploaded successfully')
  }

  // Submit the user's prompt to /query/ and append both sides of the exchange.
  const onSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()
    if (!threadId) {
      alert('Please create a new chat first')
      return
    }
    // Message objects need an `id` (required by the AI SDK's Message type).
    const userMessage: Message = { id: crypto.randomUUID(), role: 'user', content: input }
    const withUser = [...messages, userMessage]
    setMessages(withUser)
    setInput('') // clear the box right away so the user can keep typing
    const response = await fetch('http://localhost:8001/query/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ thread_id: threadId, query: userMessage.content }),
    })
    const data = await response.json()
    // Append onto the array we already committed rather than onto a stale
    // `messages` captured before the await.
    setMessages([
      ...withUser,
      { id: crypto.randomUUID(), role: 'assistant', content: data.response },
    ])
  }

  return (
    <div className="flex flex-col items-center justify-center min-h-screen bg-gray-100 p-4">
      <Card className="w-full max-w-2xl">
        <CardHeader>
          <CardTitle>AI Chat</CardTitle>
          <div className="flex space-x-2">
            <Button onClick={createNewChat}>New Chat</Button>
            <Button onClick={loadChat}>Load Chat</Button>
            <Input type="file" onChange={handleFileChange} accept=".pdf" />
            <Button onClick={uploadPdf}>Upload PDF</Button>
          </div>
        </CardHeader>
        <CardContent className="h-[60vh] overflow-y-auto">
          {messages.map((m) => (
            <div key={m.id} className={`mb-4 ${m.role === 'user' ? 'text-right' : 'text-left'}`}>
              <span className={`inline-block p-2 rounded-lg ${m.role === 'user' ? 'bg-blue-500 text-white' : 'bg-gray-200 text-black'}`}>
                {m.content}
              </span>
            </div>
          ))}
        </CardContent>
        <CardFooter>
          <form onSubmit={onSubmit} className="flex w-full space-x-2">
            <Input
              value={input}
              onChange={handleInputChange}
              placeholder="Type your message..."
              className="flex-grow"
            />
            <Button type="submit">Send</Button>
          </form>
        </CardFooter>
      </Card>
    </div>
  )
}
I found some issues in the code block.
I will fix them.
'use client'

import { useState } from 'react'
import { useChat, type Message } from 'ai/react'
import { Button } from "@/components/ui/button"
import { Input } from "@/components/ui/input"
import { Card, CardContent, CardFooter, CardHeader, CardTitle } from '@/components/ui/card'

/**
 * Chat page: a single-thread chatbot UI backed by the FastAPI server.
 * Supports creating a chat thread, reloading its stored history,
 * uploading a PDF, and sending queries to the backend's /query/ endpoint.
 */
export default function Chat() {
  const [threadId, setThreadId] = useState<string | null>(null)
  // useChat manages the message list and the controlled text input.
  const { messages, input, setInput, handleInputChange, setMessages } = useChat()
  const [pdfFile, setPdfFile] = useState<File | null>(null)

  // Ask the backend for a fresh thread and reset the visible conversation.
  const createNewChat = async () => {
    const response = await fetch('http://localhost:8001/new-chat/', { method: 'POST' })
    const data = await response.json()
    setThreadId(data.thread_id)
    setMessages([])
  }

  // Reload the stored history for the current thread from the backend.
  const loadChat = async () => {
    if (!threadId) return
    const response = await fetch(`http://localhost:8001/chat/${threadId}`)
    const data = await response.json()
    // Backend messages may lack ids; synthesize stable ones so they satisfy
    // the AI SDK Message type and can serve as React keys.
    setMessages(
      data.messages.map((m: any, i: number): Message => ({
        id: m.id ?? String(i),
        role: m.role,
        content: m.content,
      })),
    )
  }

  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    if (e.target.files) {
      setPdfFile(e.target.files[0])
    }
  }

  // Send the selected PDF to the backend for processing.
  const uploadPdf = async () => {
    if (!pdfFile) return
    const formData = new FormData()
    formData.append('file', pdfFile)
    await fetch('http://localhost:8001/upload-pdf/', {
      method: 'POST',
      body: formData,
    })
    alert('PDF uploaded successfully')
  }

  // Submit the user's prompt to /query/ and append both sides of the exchange.
  const onSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()
    if (!threadId) {
      alert('Please create a new chat first')
      return
    }
    // Message objects need an `id` (required by the AI SDK's Message type).
    const userMessage: Message = { id: crypto.randomUUID(), role: 'user', content: input }
    const withUser = [...messages, userMessage]
    setMessages(withUser)
    setInput('') // clear the box right away so the user can keep typing
    const response = await fetch('http://localhost:8001/query/', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ thread_id: threadId, query: userMessage.content }),
    })
    const data = await response.json()
    // Append onto the array we already committed rather than onto a stale
    // `messages` captured before the await.
    setMessages([
      ...withUser,
      { id: crypto.randomUUID(), role: 'assistant', content: data.response },
    ])
  }

  return (
    <div className="flex flex-col items-center justify-center min-h-screen bg-gray-100 p-4">
      <Card className="w-full max-w-2xl">
        <CardHeader>
          <CardTitle>AI Chat</CardTitle>
          <div className="flex space-x-2">
            <Button onClick={createNewChat}>New Chat</Button>
            <Button onClick={loadChat}>Load Chat</Button>
            <Input type="file" onChange={handleFileChange} accept=".pdf" />
            <Button onClick={uploadPdf}>Upload PDF</Button>
          </div>
        </CardHeader>
        <CardContent className="h-[60vh] overflow-y-auto">
          {messages.map((m) => (
            <div key={m.id} className={`mb-4 ${m.role === 'user' ? 'text-right' : 'text-left'}`}>
              <span className={`inline-block p-2 rounded-lg ${m.role === 'user' ? 'bg-blue-500 text-white' : 'bg-gray-200 text-black'}`}>
                {m.content}
              </span>
            </div>
          ))}
        </CardContent>
        <CardFooter>
          <form onSubmit={onSubmit} className="flex w-full space-x-2">
            <Input
              value={input}
              onChange={handleInputChange}
              placeholder="Type your message..."
              className="flex-grow"
            />
            <Button type="submit">Send</Button>
          </form>
        </CardFooter>
      </Card>
    </div>
  )
}
This component creates a chat interface that integrates with your backend API. It includes functionality for creating new chats, loading existing chats, uploading PDFs, and sending queries.
app/api/chat/route.ts
:import { NextRequest, NextResponse } from 'next/server'
export async function POST(req: NextRequest) {
const { messages } = await req.json()
const threadId = req.headers.get('x-thread-id')
const response = await fetch('http://localhost:8001/query/', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ thread_id: threadId, query: messages[messages.length - 1].content }),
})
const data = await response.json()
return NextResponse.json({ role: 'assistant', content: data.response })
}
This API route will forward the user's query to your backend and return the response.
next.config.js
file to add the necessary rewrites:

/** @type {import('next').NextConfig} */
// Proxy target: the local FastAPI backend.
const BACKEND_URL = 'http://localhost:8001'

// Rewrite every /api/* request to the backend so the browser never makes a
// cross-origin call (avoids CORS entirely in development).
const nextConfig = {
  async rewrites() {
    const proxyRule = {
      source: '/api/:path*',
      destination: `${BACKEND_URL}/:path*`,
    }
    return [proxyRule]
  },
}

module.exports = nextConfig
This configuration will forward API requests to your FastAPI backend.
useChat
hook in app/page.tsx
to use the new API route:

const { messages, input, handleInputChange, handleSubmit, setMessages } = useChat({
api: '/api/chat',
headers: { 'x-thread-id': threadId || '' },
})
Now you have a chatbot UI that integrates with your existing Python backend. This UI allows users to:
To run the application, start your FastAPI backend and then run the Next.js development server:
npm run dev
This setup provides a solid foundation for your chatbot UI. You can further customize the design and add more features as needed.