Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/components/AI/AIPanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ export default function AIPanel({ projectFiles, currentProject, currentProjectId
updateFileContexts,
toggleFileSelection,
generatePromptText,
streamingContent,
} = useAI({
onAddMessage: async (content, type, mode, fileContext, editResponse) => {
return await addSpaceMessage(content, type, mode, fileContext, editResponse);
Expand Down Expand Up @@ -498,6 +499,7 @@ export default function AIPanel({ projectFiles, currentProject, currentProjectId
messages={messages}
isProcessing={isProcessing}
emptyMessage={mode === 'ask' ? t('AI.ask') : t('AI.edit')}
streamingContent={streamingContent}
onRevert={async (message: ChatSpaceMessage) => {
// Show confirmation dialog instead of executing immediately
setRevertConfirmation({ open: true, message });
Expand Down
98 changes: 94 additions & 4 deletions src/components/AI/chat/ChatContainer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,12 @@

import { Loader2, MessageSquare, Bot } from 'lucide-react';
import React, { useEffect, useRef } from 'react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';

import ChatMessage from './ChatMessage';

import InlineHighlightedCode from '@/components/Tab/InlineHighlightedCode';
import { useTranslation } from '@/context/I18nContext';
import { useTheme } from '@/context/ThemeContext';
import type { ChatSpaceMessage } from '@/types';
Expand All @@ -15,25 +18,27 @@ interface ChatContainerProps {
messages: ChatSpaceMessage[];
isProcessing: boolean;
emptyMessage?: string;
streamingContent?: string;
onRevert?: (message: ChatSpaceMessage) => Promise<void>;
}

export default function ChatContainer({
messages,
isProcessing,
emptyMessage = 'AIとチャットを開始してください',
streamingContent = '',
onRevert,
}: ChatContainerProps) {
const { colors } = useTheme();
const { t } = useTranslation();
const scrollRef = useRef<HTMLDivElement>(null);

// Auto scroll to bottom when new messages arrive
// Auto scroll to bottom when new messages arrive or streaming content updates
useEffect(() => {
if (scrollRef.current) {
scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
}
}, [messages.length, isProcessing]);
}, [messages.length, isProcessing, streamingContent]);

return (
<div
Expand Down Expand Up @@ -65,8 +70,93 @@ export default function ChatContainer({
/>
))}

{/* Processing indicator */}
{isProcessing && (
{/* Streaming message display */}
{isProcessing && streamingContent && (
<div className="w-full flex gap-2 flex-row">
{/* Avatar */}
<div
className="flex-shrink-0 w-6 h-6 rounded-full flex items-center justify-center"
style={{
background: colors.mutedBg,
border: `1px solid ${colors.border}`,
}}
>
<Bot size={12} style={{ color: colors.foreground }} />
</div>

{/* Streaming content */}
<div className="flex-1 min-w-0 max-w-[90%]">
<div
className="relative rounded-lg px-3 py-2 text-xs"
style={{
background: colors.mutedBg,
color: colors.foreground,
border: `1px solid ${colors.border}`,
}}
>
<div className="prose prose-sm max-w-none">
<ReactMarkdown
remarkPlugins={[remarkGfm]}
components={{
code({ className, children, ...props }: any) {
const match = /language-(\w+)/.exec(className || '');
const language = match ? match[1] : '';
const inline = !language;

if (!inline && language) {
return (
<InlineHighlightedCode
language={language}
value={String(children).replace(/\n$/, '')}
/>
);
}

return (
<code
className="px-1 py-0.5 rounded text-[11px] font-mono"
style={{
background: 'rgba(0, 0, 0, 0.1)',
color: colors.foreground,
}}
{...props}
>
{children}
</code>
);
},
p: ({ children }) => <p className="mb-1.5 last:mb-0 leading-relaxed text-xs">{children}</p>,
h1: ({ children }) => <h1 className="text-sm font-bold mb-1.5 mt-2 first:mt-0">{children}</h1>,
h2: ({ children }) => <h2 className="text-xs font-bold mb-1 mt-1.5 first:mt-0">{children}</h2>,
h3: ({ children }) => <h3 className="text-xs font-semibold mb-1 mt-1 first:mt-0">{children}</h3>,
ul: ({ children }) => <ul className="list-disc list-inside mb-1.5 space-y-0.5 text-xs">{children}</ul>,
ol: ({ children }) => <ol className="list-decimal list-inside mb-1.5 space-y-0.5 text-xs">{children}</ol>,
li: ({ children }) => <li className="ml-1 text-xs">{children}</li>,
blockquote: ({ children }) => (
<blockquote
className="border-l-2 pl-2 py-0.5 my-1 italic text-xs"
style={{ borderColor: colors.accent }}
>
{children}
</blockquote>
),
}}
>
{streamingContent}
</ReactMarkdown>
</div>
{/* Streaming indicator */}
<div className="flex items-center gap-1 mt-1 opacity-60">
<Loader2 size={10} className="animate-spin" />
<span className="text-[10px]">{t('ai.chatContainer.generating')}</span>
</div>
</div>
</div>
</div>
)}

{/* Processing indicator (shown when no streaming content yet) */}
{isProcessing && !streamingContent && (
<div className="flex gap-2">
<div
className="flex-shrink-0 w-6 h-6 rounded-full flex items-center justify-center"
Expand Down
160 changes: 128 additions & 32 deletions src/engine/ai/fetchAI.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,34 @@
// src/engine/ai/fetchAI.ts
const GEMINI_API_URL =
'https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent';
const GEMINI_STREAM_API_URL =
'https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-live-001:streamGenerateContent';

export async function generateCodeEdit(prompt: string, apiKey: string): Promise<string> {
/**
* Stream chat response from Gemini API
* @param message - User message
* @param context - Context strings
* @param apiKey - Gemini API key
* @param onChunk - Callback for each chunk of text
*/
export async function streamChatResponse(
message: string,
context: string[],
apiKey: string,
onChunk: (chunk: string) => void
): Promise<void> {
if (!apiKey) throw new Error('Gemini API key is missing');

const contextText = context.length > 0 ? `\n\n参考コンテキスト:\n${context.join('\n---\n')}` : '';
const prompt = `${message}${contextText}`;

try {
const response = await fetch(`${GEMINI_API_URL}?key=${apiKey}`, {
const response = await fetch(`${GEMINI_STREAM_API_URL}?key=${apiKey}`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
contents: [{ parts: [{ text: prompt }] }],
generationConfig: {
temperature: 0.1, // より確実な回答のため温度を下げる
maxOutputTokens: 4096,
temperature: 0.7,
maxOutputTokens: 2048,
},
}),
});
Expand All @@ -22,41 +37,82 @@ export async function generateCodeEdit(prompt: string, apiKey: string): Promise<
throw new Error(`HTTP error! status: ${response.status}`);
}

const data = await response.json();
const result = data?.candidates?.[0]?.content?.parts?.[0]?.text;
if (!response.body) {
throw new Error('Response body is null');
}

const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = '';

while (true) {
const { done, value } = await reader.read();
if (done) break;

console.log('[original response]', result);
buffer += decoder.decode(value, { stream: true });

// Split by lines and process complete JSON objects
const lines = buffer.split('\n');

// Keep the last incomplete line in the buffer
buffer = lines.pop() || '';

if (!result) {
throw new Error('No response from Gemini API');
for (const line of lines) {
const trimmedLine = line.trim();
if (!trimmedLine) continue;

try {
const parsed = JSON.parse(trimmedLine);
const text = parsed?.candidates?.[0]?.content?.parts?.[0]?.text;
if (text) {
onChunk(text);
}
} catch (e) {
// Skip invalid JSON lines
console.warn('[streamChatResponse] Failed to parse chunk:', trimmedLine.substring(0, 100));
}
}
}

return result;
// Process any remaining buffer
if (buffer.trim()) {
try {
const parsed = JSON.parse(buffer.trim());
const text = parsed?.candidates?.[0]?.content?.parts?.[0]?.text;
if (text) {
onChunk(text);
}
} catch (e) {
console.warn('[streamChatResponse] Failed to parse final chunk');
}
}
} catch (error) {
throw new Error('Gemini API error: ' + (error as Error).message);
throw new Error('Gemini API streaming error: ' + (error as Error).message);
}
}

export async function generateChatResponse(
message: string,
context: string[],
apiKey: string
): Promise<string> {
/**
* Stream code edit response from Gemini API
* @param prompt - Edit prompt
* @param apiKey - Gemini API key
* @param onChunk - Callback for each chunk of text
*/
export async function streamCodeEdit(
prompt: string,
apiKey: string,
onChunk: (chunk: string) => void
): Promise<void> {
if (!apiKey) throw new Error('Gemini API key is missing');

const contextText = context.length > 0 ? `\n\n参考コンテキスト:\n${context.join('\n---\n')}` : '';

const prompt = `${message}${contextText}`;

try {
const response = await fetch(`${GEMINI_API_URL}?key=${apiKey}`, {
const response = await fetch(`${GEMINI_STREAM_API_URL}?key=${apiKey}`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
contents: [{ parts: [{ text: prompt }] }],
generationConfig: {
temperature: 0.7,
maxOutputTokens: 2048,
temperature: 0.1,
maxOutputTokens: 4096,
},
}),
});
Expand All @@ -65,16 +121,56 @@ export async function generateChatResponse(
throw new Error(`HTTP error! status: ${response.status}`);
}

const data = await response.json();
const result = data?.candidates?.[0]?.content?.parts?.[0]?.text;
if (!response.body) {
throw new Error('Response body is null');
}

const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = '';

while (true) {
const { done, value } = await reader.read();
if (done) break;

if (!result) {
throw new Error('No response from Gemini API');
buffer += decoder.decode(value, { stream: true });

// Split by lines and process complete JSON objects
const lines = buffer.split('\n');

// Keep the last incomplete line in the buffer
buffer = lines.pop() || '';

for (const line of lines) {
const trimmedLine = line.trim();
if (!trimmedLine) continue;

try {
const parsed = JSON.parse(trimmedLine);
const text = parsed?.candidates?.[0]?.content?.parts?.[0]?.text;
if (text) {
onChunk(text);
}
} catch (e) {
// Skip invalid JSON lines
console.warn('[streamCodeEdit] Failed to parse chunk:', trimmedLine.substring(0, 100));
}
}
}
console.log('[original response]', result);

return result;
// Process any remaining buffer
if (buffer.trim()) {
try {
const parsed = JSON.parse(buffer.trim());
const text = parsed?.candidates?.[0]?.content?.parts?.[0]?.text;
if (text) {
onChunk(text);
}
} catch (e) {
console.warn('[streamCodeEdit] Failed to parse final chunk');
}
}
} catch (error) {
throw new Error('Gemini API error: ' + (error as Error).message);
throw new Error('Gemini API streaming error: ' + (error as Error).message);
}
}
Loading