Merge pull request #239 from miurla/anthropic
Add Anthropic Support
miurla committed Jun 21, 2024
2 parents 6f1c46e + b9d3e64 commit 6388cdc
Showing 12 changed files with 115 additions and 69 deletions.
3 changes: 3 additions & 0 deletions .env.local.example
@@ -27,6 +27,9 @@ UPSTASH_REDIS_REST_TOKEN=[YOUR_UPSTASH_REDIS_REST_TOKEN]
# Google Generative AI API key retrieved here: https://aistudio.google.com/app/apikey
# GOOGLE_GENERATIVE_AI_API_KEY=[YOUR_GOOGLE_GENERATIVE_AI_API_KEY]

# If you want to use Anthropic instead of OpenAI, enable the following settings.
# ANTHROPIC_API_KEY=[YOUR_ANTHROPIC_API_KEY]

# [Unstable] If you want to use Ollama, enable the following variables.
# OLLAMA_MODEL=[YOUR_OLLAMA_MODEL] # The main model to use. Recommended: mistral or openhermes
# OLLAMA_SUB_MODEL=[YOUR_OLLAMA_SUB_MODEL] # The sub model to use. Recommended: phi3 or llama3
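Note: the actual provider switch lives in `getModel` in `lib/utils`, whose diff did not load in this view. A minimal sketch of what key-based selection could look like — the branching and model IDs below are assumptions for illustration, not the literal commit:

```tsx
// Hypothetical sketch of provider selection (assumed, not the actual diff).
import { anthropic } from '@ai-sdk/anthropic'
import { openai } from '@ai-sdk/openai'

export function getModel() {
  // Assumption: setting ANTHROPIC_API_KEY switches the app to Anthropic.
  if (process.env.ANTHROPIC_API_KEY) {
    return anthropic('claude-3-5-sonnet-20240620') // illustrative model ID
  }
  return openai('gpt-4o') // illustrative default
}
```
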
5 changes: 3 additions & 2 deletions README.md
@@ -26,8 +26,9 @@ An AI-powered search engine with a generative UI.
- Get answers from specified URLs
- Use as a search engine [](#-search-engine)
- Support for providers other than OpenAI
- Google Generative AI Provider support [](https://github.com/miurla/morphic/issues/192)
- Ollama Provider support ([Unstable](https://github.com/miurla/morphic/issues/215))
- Google Generative AI Provider [](https://github.com/miurla/morphic/issues/192)
- Anthropic Provider [](https://github.com/miurla/morphic/pull/239)
- Ollama Provider ([Unstable](https://github.com/miurla/morphic/issues/215))
- Specify the model to generate answers
- Groq API support [](https://github.com/miurla/morphic/pull/58)

36 changes: 20 additions & 16 deletions app/actions.tsx
@@ -24,6 +24,7 @@ import { VideoSearchSection } from '@/components/video-search-section'
import { transformToolMessages } from '@/lib/utils'
import { AnswerSection } from '@/components/answer-section'
import { ErrorCard } from '@/components/error-card'
import { use } from 'react'

async function submit(
formData?: FormData,
@@ -101,6 +102,9 @@ async function submit(
}

async function processEvents() {
// Show the spinner
uiStream.append(<Spinner />)

let action = { object: { next: 'proceed' } }
// If the user skips the task, we proceed to the search
if (!skip) action = (await taskManager(messages)) ?? action
@@ -131,24 +135,26 @@

// Generate the answer
let answer = ''
let stopReason = ''
let toolOutputs: ToolResultPart[] = []
let errorOccurred = false

const streamText = createStreamableValue<string>()
uiStream.update(
<AnswerSection result={streamText.value} hasHeader={false} />
)

// If useSpecificAPI is enabled, only function calls will be made
// If not using a tool, this model generates the answer
while (
useSpecificAPI
? toolOutputs.length === 0 && answer.length === 0
: answer.length === 0 && !errorOccurred
? toolOutputs.length === 0 && answer.length === 0 && !errorOccurred
: stopReason !== 'stop' && !errorOccurred
) {
// Search the web and generate the answer
const { fullResponse, hasError, toolResponses } = await researcher(
uiStream,
streamText,
messages,
useSpecificAPI
)
const { fullResponse, hasError, toolResponses, finishReason } =
await researcher(uiStream, streamText, messages)
stopReason = finishReason || ''
answer = fullResponse
toolOutputs = toolResponses
errorOccurred = hasError
@@ -177,13 +183,13 @@ async function submit(
// modify the messages to be used by the specific model
const modifiedMessages = transformToolMessages(messages)
const latestMessages = modifiedMessages.slice(maxMessages * -1)
const { response, hasError } = await writer(
uiStream,
streamText,
latestMessages
)
const { response, hasError } = await writer(uiStream, latestMessages)
answer = response
errorOccurred = hasError
messages.push({
role: 'assistant',
content: answer
})
}

if (!errorOccurred) {
@@ -396,9 +402,7 @@ export const getUIStateFromAIState = (aiState: Chat) => {
return {
id,
component: (
<Section title="Related" separator={true}>
<SearchRelated relatedQueries={relatedQueries.value} />
</Section>
<SearchRelated relatedQueries={relatedQueries.value} />
)
}
case 'followup':
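Stripped of diff context, the reworked loop now terminates on the model's reported finish reason instead of the accumulated answer text, so tool-call rounds keep iterating until the model signals `'stop'`. A condensed sketch (omitting the `useSpecificAPI` branch):

```tsx
// Condensed sketch of the generation loop above.
let stopReason = ''
let errorOccurred = false
while (stopReason !== 'stop' && !errorOccurred) {
  const { fullResponse, hasError, finishReason } =
    await researcher(uiStream, streamText, messages)
  stopReason = finishReason || '' // 'tool-calls' keeps the loop going
  errorOccurred = hasError
}
```
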
Binary file modified bun.lockb
8 changes: 6 additions & 2 deletions components/answer-section.tsx
@@ -8,9 +8,13 @@ import { useEffect, useState } from 'react'

export type AnswerSectionProps = {
result?: StreamableValue<string>
hasHeader?: boolean
}

export function AnswerSection({ result }: AnswerSectionProps) {
export function AnswerSection({
result,
hasHeader = true
}: AnswerSectionProps) {
const [data, error, pending] = useStreamableValue(result)
const [content, setContent] = useState<string>('')

@@ -22,7 +26,7 @@ export function AnswerSection({ result }: AnswerSectionProps) {
return (
<div>
{content.length > 0 ? (
<Section title="Answer">
<Section title={hasHeader ? 'Answer' : undefined}>
<BotMessage content={content} />
</Section>
) : (
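Because `hasHeader` defaults to `true`, existing call sites are unchanged; only the inline answer rendered during the generation loop in `app/actions.tsx` opts out of the title:

```tsx
// Default: renders under the "Answer" section title.
<AnswerSection result={streamText.value} />

// Inline answer inside app/actions.tsx: header suppressed.
<AnswerSection result={streamText.value} hasHeader={false} />
```
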
47 changes: 28 additions & 19 deletions components/search-related.tsx
@@ -12,6 +12,8 @@ import {
import { AI } from '@/app/actions'
import { UserMessage } from './user-message'
import { PartialRelated } from '@/lib/schema/related'
import { Section } from './section'
import { Skeleton } from './ui/skeleton'

export interface SearchRelatedProps {
relatedQueries: StreamableValue<PartialRelated>
@@ -26,6 +28,7 @@ export const SearchRelated: React.FC<SearchRelatedProps> = ({
const [related, setRelated] = useState<PartialRelated>()

useEffect(() => {
console.log('data', data)
if (!data) return
setRelated(data)
}, [data])
@@ -56,25 +59,31 @@ export const SearchRelated: React.FC<SearchRelatedProps> = ({
])
}

return (
<form onSubmit={handleSubmit} className="flex flex-wrap">
{related?.items
?.filter(item => item?.query !== '')
.map((item, index) => (
<div className="flex items-start w-full" key={index}>
<ArrowRight className="h-4 w-4 mr-2 mt-1 flex-shrink-0 text-accent-foreground/50" />
<Button
variant="link"
className="flex-1 justify-start px-0 py-1 h-fit font-semibold text-accent-foreground/50 whitespace-normal text-left"
type="submit"
name={'related_query'}
value={item?.query}
>
{item?.query}
</Button>
</div>
))}
</form>
return related ? (
<Section title="Related" separator={true}>
<form onSubmit={handleSubmit} className="flex flex-wrap">
{related?.items
?.filter(item => item?.query !== '')
.map((item, index) => (
<div className="flex items-start w-full" key={index}>
<ArrowRight className="h-4 w-4 mr-2 mt-1 flex-shrink-0 text-accent-foreground/50" />
<Button
variant="link"
className="flex-1 justify-start px-0 py-1 h-fit font-semibold text-accent-foreground/50 whitespace-normal text-left"
type="submit"
name={'related_query'}
value={item?.query}
>
{item?.query}
</Button>
</div>
))}
</form>
</Section>
) : error ? null : (
<Section title="Related" separator={true}>
<Skeleton className="w-full h-6" />
</Section>
)
}

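The three return branches follow the tuple yielded by `useStreamableValue` from `ai/rsc` — `[data, error, pending]` — which is what lets the component distinguish "errored" from "still streaming". A minimal consumption sketch (the component name here is hypothetical):

```tsx
'use client'

import { useStreamableValue, type StreamableValue } from 'ai/rsc'

// Hypothetical example of the same state handling as SearchRelated.
function StreamStatus({ value }: { value: StreamableValue<string> }) {
  const [data, error, pending] = useStreamableValue(value)
  if (error) return null // hide on error, as SearchRelated does
  if (pending && !data) return <p>loading…</p> // a Skeleton stands in above
  return <p>{data}</p>
}
```
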
16 changes: 9 additions & 7 deletions lib/agents/query-suggestor.tsx
@@ -1,7 +1,6 @@
import { createStreamableUI, createStreamableValue } from 'ai/rsc'
import { CoreMessage, streamObject } from 'ai'
import { PartialRelated, relatedSchema } from '@/lib/schema/related'
import { Section } from '@/components/section'
import SearchRelated from '@/components/search-related'
import { getModel } from '../utils'

@@ -10,11 +9,14 @@ export async function querySuggestor(
messages: CoreMessage[]
) {
const objectStream = createStreamableValue<PartialRelated>()
uiStream.append(
<Section title="Related" separator={true}>
<SearchRelated relatedQueries={objectStream.value} />
</Section>
)
uiStream.append(<SearchRelated relatedQueries={objectStream.value} />)

const lastMessages = messages.slice(-1).map(message => {
return {
...message,
role: 'user'
}
}) as CoreMessage[]

let finalRelatedQueries: PartialRelated = {}
await streamObject({
@@ -33,7 +35,7 @@
Aim to create queries that progressively delve into more specific aspects, implications, or adjacent topics related to the initial query. The goal is to anticipate the user's potential information needs and guide them towards a more comprehensive understanding of the subject matter.
Please match the language of the response to the user's language.`,
messages,
messages: lastMessages,
schema: relatedSchema
})
.then(async result => {
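Two changes hide in this hunk: the suggestor no longer wraps `SearchRelated` in a `Section` (the component now renders its own, per `components/search-related.tsx` above), and it sends only the final message, re-tagged with the `user` role. The commit does not state the motivation, but forcing a trailing `user` turn reads like a compatibility measure for providers with strict role rules, such as Anthropic. The transformation, in isolation:

```tsx
// Keep only the last message and coerce its role to 'user' before
// calling streamObject (motivation assumed: provider role constraints).
// e.g. [user, assistant, tool] -> [user (carrying the tool turn's content)]
const lastMessages = messages.slice(-1).map(message => {
  return { ...message, role: 'user' }
}) as CoreMessage[]
```
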
36 changes: 24 additions & 12 deletions lib/agents/researcher.tsx
@@ -3,16 +3,15 @@ import { CoreMessage, ToolCallPart, ToolResultPart, streamText } from 'ai'
import { getTools } from './tools'
import { getModel, transformToolMessages } from '../utils'
import { AnswerSection } from '@/components/answer-section'
import { AnswerSectionGenerated } from '@/components/answer-section-generated'

export async function researcher(
uiStream: ReturnType<typeof createStreamableUI>,
streamableText: ReturnType<typeof createStreamableValue<string>>,
messages: CoreMessage[],
useSpecificModel?: boolean
messages: CoreMessage[]
) {
let fullResponse = ''
let hasError = false
let finishReason = ''

// Transform the messages if using Ollama provider
let processedMessages = messages
@@ -25,7 +24,9 @@
const includeToolResponses = messages.some(message => message.role === 'tool')
const useSubModel = useOllamaProvider && includeToolResponses

const answerSection = <AnswerSection result={streamableText.value} />
const streamableAnswer = createStreamableValue<string>('')
const answerSection = <AnswerSection result={streamableAnswer.value} />

const currentDate = new Date().toLocaleString()
const result = await streamText({
model: getModel(useSubModel),
@@ -39,12 +40,18 @@
The number must always match the order of the search results.
The retrieve tool can only be used with URLs provided by the user. URLs from search results cannot be used.
If it is a domain instead of a URL, specify it in the include_domains of the search tool.
Please match the language of the response to the user's language. Current date and time: ${currentDate}`,
Please match the language of the response to the user's language. Current date and time: ${currentDate}
`,
messages: processedMessages,
tools: getTools({
uiStream,
fullResponse
})
}),
onFinish: async event => {
finishReason = event.finishReason
fullResponse = event.text
streamableAnswer.done()
}
}).catch(err => {
hasError = true
fullResponse = 'Error: ' + err.message
@@ -56,6 +63,11 @@
return { result, fullResponse, hasError, toolResponses: [] }
}

const hasToolResult = messages.some(message => message.role === 'tool')
if (hasToolResult) {
uiStream.append(answerSection)
}

// Process the response
const toolCalls: ToolCallPart[] = []
const toolResponses: ToolResultPart[] = []
@@ -64,17 +76,17 @@
case 'text-delta':
if (delta.textDelta) {
fullResponse += delta.textDelta
streamableText.update(fullResponse)
if (hasToolResult) {
streamableAnswer.update(fullResponse)
} else {
streamableText.update(fullResponse)
}
}
break
case 'tool-call':
toolCalls.push(delta)
break
case 'tool-result':
// Append the answer section if the specific model is not used
if (!useSpecificModel && toolResponses.length === 0 && delta.result) {
uiStream.append(answerSection)
}
if (!delta.result) {
hasError = true
}
@@ -97,5 +109,5 @@
messages.push({ role: 'tool', content: toolResponses })
}

return { result, fullResponse, hasError, toolResponses }
return { result, fullResponse, hasError, toolResponses, finishReason }
}
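The net effect in `researcher` is a dual-stream setup: until a tool has produced a result, text deltas flow into the caller-supplied `streamableText`; once tool results exist, the researcher appends its own `AnswerSection` backed by a locally created streamable and routes deltas there. Compressed:

```tsx
// Compressed view of the delta routing above (not the full switch).
const hasToolResult = messages.some(message => message.role === 'tool')
for await (const delta of result.fullStream) {
  if (delta.type === 'text-delta' && delta.textDelta) {
    fullResponse += delta.textDelta
    // Local answer stream after a tool round, caller's stream otherwise.
    ;(hasToolResult ? streamableAnswer : streamableText).update(fullResponse)
  }
}
```
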
2 changes: 1 addition & 1 deletion lib/agents/tools/search.tsx
@@ -18,7 +18,7 @@ export const searchTool = ({ uiStream, fullResponse }: ToolProps) => tool({
let hasError = false
// Append the search section
const streamResults = createStreamableValue<string>()
uiStream.append(
uiStream.update(
<SearchSection
result={streamResults.value}
includeDomains={include_domains}
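The one-line change swaps `append` for `update` on the `createStreamableUI` handle: `append` adds a node after the current UI (as `processEvents` now does with the spinner), while `update` replaces the current node, so the search section takes the spinner's place instead of stacking beneath it. Schematically:

```tsx
import { createStreamableUI } from 'ai/rsc'

// Schematic only: placeholder elements stand in for Spinner/SearchSection.
const uiStream = createStreamableUI()
uiStream.append(<p>spinner…</p>) // add a node; later updates target it
uiStream.update(<p>search results</p>) // replace that node in place
uiStream.done()
```
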
13 changes: 6 additions & 7 deletions lib/agents/writer.tsx
@@ -6,12 +6,12 @@ import { AnswerSectionGenerated } from '@/components/answer-section-generated'

export async function writer(
uiStream: ReturnType<typeof createStreamableUI>,
streamableText: ReturnType<typeof createStreamableValue<string>>,
messages: CoreMessage[]
) {
let fullResponse = ''
let hasError = false
const answerSection = <AnswerSection result={streamableText.value} />
const streamableAnswer = createStreamableValue<string>('')
const answerSection = <AnswerSection result={streamableAnswer.value} />
uiStream.append(answerSection)

const openai = createOpenAI({
@@ -31,23 +31,22 @@
`,
messages,
onFinish: event => {
// If the response is generated, update the generated answer section
// There is a bug where a new instance of the answer section is displayed once when the next section is added
uiStream.update(<AnswerSectionGenerated result={event.text} />)
fullResponse = event.text
streamableAnswer.done(event.text)
}
})
.then(async result => {
for await (const text of result.textStream) {
if (text) {
fullResponse += text
streamableText.update(fullResponse)
streamableAnswer.update(fullResponse)
}
}
})
.catch(err => {
hasError = true
fullResponse = 'Error: ' + err.message
streamableText.update(fullResponse)
streamableAnswer.update(fullResponse)
})

return { response: fullResponse, hasError }
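Like the researcher, the writer now owns its streamable end to end — created locally, fed from `textStream`, and closed in `onFinish` — rather than borrowing one from the caller (hence the simplified `writer(uiStream, latestMessages)` call site in `app/actions.tsx`). The lifecycle, reduced to its steps:

```tsx
// Sketch of the self-owned answer stream's lifecycle (from the code above).
const streamableAnswer = createStreamableValue<string>('')
uiStream.append(<AnswerSection result={streamableAnswer.value} />)

for await (const text of result.textStream) {
  fullResponse += text
  streamableAnswer.update(fullResponse) // push each delta to the client
}
streamableAnswer.done(fullResponse) // seal the stream with the final text
```
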
