hts/apps/blogai/lib/agents/researcher.tsx

import { createStreamableUI, createStreamableValue } from 'ai/rsc'
import {
ExperimentalMessage,
ToolCallPart,
ToolResultPart,
experimental_streamText
} from 'ai'
import { searchSchema } from '@/lib/schema/search'
import { Section } from '@/components/mpv2/section'
import { OpenAI } from 'ai/openai'
import { ToolBadge } from '@/components/mpv2/tool-badge'
import { SearchSkeleton } from '@/components/mpv2/search-skeleton'
import { SearchResults } from '@/components/mpv2/search-results'
import { BotMessage } from '@/components/mpv2/message'
import Exa from 'exa-js'
import { SearchResultsImageSection } from '@/components/mpv2/search-results-image'
import { Card } from '@/components/ui-v2/card'
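// Research agent: streams a tool-calling chat completion, renders search
// progress and results into the provided RSC UI stream, and accumulates the
// answer text into the streamable value as it arrives.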
export async function researcher(
uiStream: ReturnType<typeof createStreamableUI>,
streamText: ReturnType<typeof createStreamableValue<string>>,
messages: ExperimentalMessage[]
) {
const openai = new OpenAI({
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
apiKey: process.env.OPENAI_API_KEY, // optional API key, defaults to the OPENAI_API_KEY env var
organization: '' // optional organization
})
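// Search provider to use; 'exa' requires EXA_API_KEY to be set.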
const searchAPI: 'tavily' | 'exa' = 'tavily'
let fullResponse = ''
let hasError = false
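// Answer section bound to the streamable text value; it is rendered into the
// UI stream after search results arrive or once the first text delta appears.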
const answerSection = (
<Section title="Answer">
<BotMessage content={streamText.value} />
</Section>
)
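// Stream the model response, exposing a `search` tool the model can call.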
const result = await experimental_streamText({
model: openai.chat(process.env.OPENAI_API_MODEL || 'gpt-4-turbo'),
maxTokens: 2500,
system: `As a professional search expert, you possess the ability to search for any information on the web.
For each user query, utilize the search results to their fullest potential to provide additional information and assistance in your response.
If there are any images relevant to your answer, be sure to include them as well.
Aim to directly address the user's question, augmenting your response with insights gleaned from the search results.
Whenever quoting or referencing information from a specific URL, always cite the source URL explicitly.
`,
messages,
tools: {
search: {
description: 'Search the web for information',
parameters: searchSchema,
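// Show a search badge and a loading skeleton, run the selected search
// provider, then swap the placeholder UI for the results (or an error card).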
execute: async ({
query,
max_results,
search_depth
}: {
query: string
max_results: number
search_depth: 'basic' | 'advanced'
}) => {
uiStream.update(
<Section>
<ToolBadge tool="search">{`${query}`}</ToolBadge>
</Section>
)
uiStream.append(
<Section>
<SearchSkeleton />
</Section>
)
// Tavily API requires a minimum of 5 characters in the query
const filledQuery =
query.length < 5 ? query + ' '.repeat(5 - query.length) : query
let searchResult
try {
searchResult =
searchAPI === 'tavily'
? await tavilySearch(filledQuery, max_results, search_depth)
: await exaSearch(query)
} catch (error) {
console.error('Search API error:', error)
hasError = true
}
if (hasError) {
fullResponse += `\nAn error occurred while searching for "${query}".`
uiStream.update(
<Card className="p-4 mt-2 text-sm">
{`An error occurred while searching for "${query}".`}
</Card>
)
return searchResult
}
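// Replace the loading skeleton with image results, then append the source
// list and the answer section (which fills in as the model streams text).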
uiStream.update(
<Section title="Images">
<SearchResultsImageSection
images={searchResult.images}
query={searchResult.query}
/>
</Section>
)
uiStream.append(
<Section title="Sources">
<SearchResults results={searchResult.results} />
</Section>
)
uiStream.append(answerSection)
return searchResult
}
}
}
})
const toolCalls: ToolCallPart[] = []
const toolResponses: ToolResultPart[] = []
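// Consume the full stream: accumulate text deltas into the answer text and
// collect tool calls/results so they can be added to the message history.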
for await (const delta of result.fullStream) {
console.log("====GPT4===", delta)
switch (delta.type) {
case 'text-delta':
if (delta.textDelta) {
// When the first text delta arrives, add the answer section to the UI
if (fullResponse.length === 0 && delta.textDelta.length > 0) {
// Update the UI
uiStream.update(answerSection)
}
fullResponse += delta.textDelta
streamText.update(fullResponse)
}
break
case 'tool-call':
toolCalls.push(delta)
break
case 'tool-result':
toolResponses.push(delta)
break
case 'error':
hasError = true
fullResponse += `\nAn error occurred while executing the tool.`
break
}
}
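// Record the assistant turn (answer text plus any tool calls) in the history.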
messages.push({
role: 'assistant',
content: [{ type: 'text', text: fullResponse }, ...toolCalls]
})
if (toolResponses.length > 0) {
// Add tool responses to the messages
messages.push({ role: 'tool', content: toolResponses })
}
return { result, fullResponse, hasError }
}
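// Query the Tavily search API and return the raw JSON response
// (results, images and the generated answer).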
async function tavilySearch(
query: string,
maxResults: number = 10,
searchDepth: 'basic' | 'advanced' = 'basic'
): Promise<any> {
const apiKey = process.env.TAVILY_API_KEY
const response = await fetch('https://api.tavily.com/search', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
api_key: apiKey,
query,
max_results: maxResults < 5 ? 5 : maxResults,
search_depth: searchDepth,
include_images: true,
include_answer: true
})
})
if (!response.ok) {
throw new Error(`Error: ${response.status}`)
}
const data = await response.json()
console.log("---TAVILY--response-----", data)
return data
}
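// Query Exa's searchAndContents endpoint, returning results with highlight snippets.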
async function exaSearch(query: string, maxResults: number = 10): Promise<any> {
const apiKey = process.env.EXA_API_KEY
const exa = new Exa(apiKey)
return exa.searchAndContents(query, {
highlights: true,
numResults: maxResults
})
}
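
// Example usage (sketch, not part of this file): a server action that owns the
// streams and the conversation history could call the agent roughly like this:
//
//   const uiStream = createStreamableUI()
//   const textStream = createStreamableValue<string>('')
//   const { fullResponse, hasError } = await researcher(uiStream, textStream, messages)
//   textStream.done()
//   uiStream.done()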