feat: optimized agent response time
This commit is contained in:
parent dd59608d73
commit 1365fecb08
````diff
@@ -29,7 +29,7 @@ export default {
       return new Response("Missing instructions parameter", { status: 400 });
     }
 
-    const prompt = `You are an intelligent programming assistant. Please respond to the following request:
+    const prompt = `You are an intelligent programming assistant. Please respond to the following request concisely:
 
 ${instructions}
 
@@ -41,24 +41,38 @@ If your response includes code, please format it using triple backticks (\`\`\`)
 print("Hello, World!")
 ```
 
-Provide a clear and concise explanation along with any code snippets.`;
+Provide a clear and concise explanation along with any code snippets. Keep your response brief and to the point`;
 
     try {
       const anthropic = new Anthropic({ apiKey: env.ANTHROPIC_API_KEY });
 
-      const response = await anthropic.messages.create({
+      const stream = await anthropic.messages.create({
         model: "claude-3-opus-20240229",
         max_tokens: 1024,
         messages: [{ role: "user", content: prompt }],
+        stream: true,
       });
 
-      const assistantResponse = response.content[0].type === 'text' ? response.content[0].text : '';
+      const encoder = new TextEncoder();
 
-      // When sending the response, include CORS headers
-      return new Response(JSON.stringify({ "response": assistantResponse }), {
+      const streamResponse = new ReadableStream({
+        async start(controller) {
+          for await (const chunk of stream) {
+            if (chunk.type === 'content_block_delta' && chunk.delta.type === 'text_delta') {
+              const bytes = encoder.encode(chunk.delta.text);
+              controller.enqueue(bytes);
+            }
+          }
+          controller.close();
+        },
+      });
+
+      return new Response(streamResponse, {
         headers: {
-          "Content-Type": "application/json",
+          "Content-Type": "text/plain; charset=utf-8",
           "Access-Control-Allow-Origin": "*",
+          "Cache-Control": "no-cache",
+          "Connection": "keep-alive",
         },
       });
     } catch (error) {
````
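Server side, these two hunks turn the Worker from a buffer-then-respond handler into a streaming one: `messages.create` is called with `stream: true`, and each `text_delta` event is encoded and enqueued into a `ReadableStream` that backs the `Response`, so the first tokens reach the browser as soon as the model emits them instead of after the full completion. Distilled into a self-contained handler, the pattern looks like this — a minimal sketch assuming the `@anthropic-ai/sdk` setup visible in the hunks, with a hard-coded illustrative prompt and error handling trimmed:

```ts
import Anthropic from '@anthropic-ai/sdk';

export default {
  async fetch(_request: Request, env: { ANTHROPIC_API_KEY: string }): Promise<Response> {
    const anthropic = new Anthropic({ apiKey: env.ANTHROPIC_API_KEY });

    // stream: true makes create() return an async iterable of message
    // events rather than one finished message.
    const stream = await anthropic.messages.create({
      model: 'claude-3-opus-20240229',
      max_tokens: 1024,
      messages: [{ role: 'user', content: 'Say hello' }], // illustrative prompt
      stream: true,
    });

    const encoder = new TextEncoder();
    const body = new ReadableStream({
      async start(controller) {
        // Forward only the text deltas; other event types (message_start,
        // content_block_stop, ...) carry no display text.
        for await (const event of stream) {
          if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
            controller.enqueue(encoder.encode(event.delta.text));
          }
        }
        controller.close();
      },
    });

    return new Response(body, {
      headers: { 'Content-Type': 'text/plain; charset=utf-8' },
    });
  },
};
```

The response body is now plain text rather than JSON, which is why the `Content-Type` changes and why the client can no longer call `response.json()`. The remaining hunks update the React `AIChat` component to consume that stream.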
```diff
@@ -1,4 +1,4 @@
-import React, { useState, useEffect, useRef } from 'react';
+import React, { useState, useEffect, useRef, useCallback } from 'react';
 import { Button } from '../ui/button';
 import { Send, StopCircle, Copy, Check, ChevronDown, ChevronUp, X, CornerUpLeft, Loader2 } from 'lucide-react';
 import ReactMarkdown from 'react-markdown';
```
```diff
@@ -39,7 +39,7 @@ export default function AIChat() {
     }
   };
 
-  const handleSend = async () => {
+  const handleSend = useCallback(async () => {
     if (input.trim() === '' && !context) return;
 
     const newMessage: Message = {
@@ -51,20 +51,17 @@ export default function AIChat() {
     setInput('');
     setIsContextExpanded(false);
     setIsGenerating(true);
-    setIsLoading(true); // Set loading state to true
+    setIsLoading(true);
 
     abortControllerRef.current = new AbortController();
 
     try {
      const queryParams = new URLSearchParams({
        instructions: input,
-        ...(context && { context }) // Include context only if it exists
+        ...(context && { context })
      });
      const response = await fetch(`http://127.0.0.1:8787/api?${queryParams}`, {
        method: 'GET',
-        headers: {
-          'Content-Type': 'application/json',
-        },
        signal: abortControllerRef.current.signal,
      });
 
```
```diff
@@ -72,22 +69,41 @@ export default function AIChat() {
         throw new Error('Failed to get AI response');
       }
 
-      const data = await response.json();
+      const reader = response.body?.getReader();
+      const decoder = new TextDecoder();
       const assistantMessage: Message = { role: 'assistant', content: '' };
       setMessages(prev => [...prev, assistantMessage]);
-      setIsLoading(false); // Set loading state to false once we start receiving the response
+      setIsLoading(false);
 
-      // Simulate text generation
-      for (let i = 0; i <= data.response.length; i++) {
-        if (abortControllerRef.current.signal.aborted) {
-          break;
+      let buffer = '';
+      const updateInterval = 100; // Update every 100ms
+      let lastUpdateTime = Date.now();
+
+      if (reader) {
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) break;
+          buffer += decoder.decode(value, { stream: true });
+
+          const currentTime = Date.now();
+          if (currentTime - lastUpdateTime > updateInterval) {
+            setMessages(prev => {
+              const updatedMessages = [...prev];
+              const lastMessage = updatedMessages[updatedMessages.length - 1];
+              lastMessage.content = buffer;
+              return updatedMessages;
+            });
+            lastUpdateTime = currentTime;
+          }
         }
+
+        // Final update to ensure all content is displayed
         setMessages(prev => {
           const updatedMessages = [...prev];
-          updatedMessages[updatedMessages.length - 1].content = data.response.slice(0, i);
+          const lastMessage = updatedMessages[updatedMessages.length - 1];
+          lastMessage.content = buffer;
           return updatedMessages;
         });
-        await new Promise(resolve => setTimeout(resolve, 20));
       }
     } catch (error: any) {
       if (error.name === 'AbortError') {
```
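This hunk is the heart of the client change: instead of awaiting `response.json()` and replaying the finished answer character by character with artificial 20 ms delays, `handleSend` reads the body as it arrives and flushes the accumulated `buffer` into state at most once per 100 ms, so React re-renders at a bounded rate no matter how fast chunks land. Here is the same read loop in isolation — a minimal sketch in which `url` and the `onText` callback are illustrative stand-ins for the component's fetch call and its `setMessages` update:

```ts
// Consume a streamed fetch body, throttling UI updates to one per interval.
async function readStream(url: string, onText: (full: string) => void): Promise<void> {
  const response = await fetch(url);
  if (!response.ok || !response.body) throw new Error('Failed to get AI response');

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let lastUpdate = Date.now();
  const updateInterval = 100; // flush to the UI at most every 100 ms

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // { stream: true } holds back incomplete multi-byte sequences so
    // characters split across chunk boundaries decode correctly.
    buffer += decoder.decode(value, { stream: true });

    if (Date.now() - lastUpdate > updateInterval) {
      onText(buffer);
      lastUpdate = Date.now();
    }
  }
  onText(buffer); // final flush covers text received since the last interval
}
```

The final flush matters: without it, anything streamed during the last partial interval would never reach the screen, which is why the diff keeps a closing `setMessages` after the loop. The existing `AbortController` wiring also keeps working, since aborting the fetch makes the pending `reader.read()` reject with an `AbortError` that the unchanged `catch` block already handles.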
```diff
@@ -99,10 +115,10 @@ export default function AIChat() {
       }
     } finally {
       setIsGenerating(false);
-      setIsLoading(false); // Ensure loading state is set to false
+      setIsLoading(false);
       abortControllerRef.current = null;
     }
-  };
+  }, [input, context]);
 
   const handleStopGeneration = () => {
     if (abortControllerRef.current) {
```
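Wrapping `handleSend` in `useCallback` with `[input, context]` gives the handler a stable identity across the many re-renders the streaming loop now triggers: each throttled `setMessages` flush re-renders the component, and without memoization every one of those renders would hand child components a brand-new function prop. A small sketch of the effect, using a hypothetical `React.memo` child (`SendButton` and `Composer` are illustrative, not from this file):

```tsx
import React, { useCallback, useState } from 'react';

// A memoized child only re-renders when its props change by reference.
const SendButton = React.memo(({ onSend }: { onSend: () => void }) => (
  <button onClick={onSend}>Send</button>
));

function Composer() {
  const [input, setInput] = useState('');
  const [chunks, setChunks] = useState(0); // stands in for streamed updates

  // Recreated only when `input` changes; `setChunks` churn leaves the
  // reference intact, so SendButton skips those renders.
  const handleSend = useCallback(() => console.log('sending', input), [input]);

  return (
    <>
      <input value={input} onChange={e => setInput(e.target.value)} />
      <button onClick={() => setChunks(c => c + 1)}>chunks: {chunks}</button>
      <SendButton onSend={handleSend} />
    </>
  );
}
```

One thing to watch with this pattern: values read inside the callback but missing from the dependency array are captured stale. Refs like `abortControllerRef` are safe to omit because the ref object itself never changes.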
```diff
@@ -333,4 +349,4 @@ export default function AIChat() {
       </div>
     </div>
   );
 }
```