3 changes: 3 additions & 0 deletions shared/types.ts
@@ -37,6 +37,8 @@ export type Content = {
// For streaming support - shows in-progress tool calls
toolCalls?: ToolCall[];
thinking?: boolean;
// AI-generated follow-up suggestions
suggestions?: string[];
};

export type MeshFileType = string;
@@ -57,6 +59,7 @@ export type ParametricArtifact = {
version: string;
code: string;
parameters: Parameter[];
suggestions?: string[];
};

export type ParameterOption = { value: string | number; label: string };
80 changes: 79 additions & 1 deletion src/components/chat/ChatSection.tsx
@@ -2,13 +2,15 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { ScrollArea } from '@/components/ui/scroll-area';
import { Message, Model } from '@shared/types';
import TextAreaChat from '@/components/TextAreaChat';
import { SuggestionPills } from '@/components/chat/SuggestionPills';
import { AssistantMessage } from '@/components/chat/AssistantMessage';
import { UserMessage } from '@/components/chat/UserMessage';
import { useConversation } from '@/services/conversationService';
import { AssistantLoading } from '@/components/chat/AssistantLoading';
import { ChatTitle } from '@/components/chat/ChatTitle';
import { TreeNode } from '@shared/Tree';
import { PARAMETRIC_MODELS } from '@/lib/utils';
import { supabase } from '@/lib/supabase';
import {
useIsLoading,
useSendContentMutation,
@@ -28,7 +30,9 @@ export function ChatSection({ messages }: ChatSectionProps) {
// Sync model selection with the conversation history (last used model)
useEffect(() => {
if (messages.length > 0) {
const lastAssistantMessage = [...messages].reverse().find((m) => m.role === 'assistant');
const lastAssistantMessage = [...messages]
.reverse()
.find((m) => m.role === 'assistant');
if (lastAssistantMessage?.content?.model) {
setModel(lastAssistantMessage.content.model);
}
@@ -67,6 +71,75 @@ export function ChatSection({ messages }: ChatSectionProps) {
return messages[messages.length - 1];
}, [messages, conversation.current_message_leaf_id]);

// Generate suggestions based on the last user message
const [suggestions, setSuggestions] = useState<string[]>([]);
const lastUserMessage = useMemo(() => {
return [...messages].reverse().find((m) => m.role === 'user');
}, [messages]);

// Generate suggestions when loading completes and we have a new assistant response
useEffect(() => {
// Don't generate while loading
if (isLoading) {
setSuggestions([]);
return;
}

// Need a user message to base suggestions on
const userPrompt = lastUserMessage?.content?.text;
if (!userPrompt) {
setSuggestions([]);
return;
}

// Check if the last message is an assistant message with an artifact (model was generated)
const lastMsg = messages[messages.length - 1];
if (lastMsg?.role !== 'assistant' || !lastMsg?.content?.artifact) {
setSuggestions([]);
return;
}

// Get the generated code for context
const generatedCode = lastMsg.content.artifact.code;
const parameters = lastMsg.content.artifact.parameters;

// Generate suggestions
const generateSuggestions = async () => {
try {
const { data, error } = await supabase.functions.invoke(
'suggestion-generator',
{
body: {
userPrompt,
generatedCode,
parameters,
},
},
);

if (error) throw error;
if (data?.suggestions) {
setSuggestions(data.suggestions);
}
} catch (err) {
console.error('Failed to generate suggestions:', err);
setSuggestions([]);
}
};
Review comment (Contributor) on lines 109 to 133:

**style:** no retry logic if suggestion generation fails - users won't see suggestions after a transient network error. consider persisting failure state to avoid repeated failed calls
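
A minimal sketch of one way to address this, not part of the diff: retry once on a failed call and remember prompts that already failed so the effect stops re-invoking the function for them. The `failedPromptsRef` name, the single retry, and the 1.5 s backoff are illustrative choices.

```ts
// Component-level ref (declared outside the effect): prompts whose
// suggestion call already failed, so we don't keep re-hitting the function.
const failedPromptsRef = useRef<Set<string>>(new Set());

// Inside the effect, replacing the generateSuggestions above:
const generateSuggestions = async (attempt = 0) => {
  if (failedPromptsRef.current.has(userPrompt)) return;
  try {
    const { data, error } = await supabase.functions.invoke(
      'suggestion-generator',
      { body: { userPrompt, generatedCode, parameters } },
    );
    if (error) throw error;
    setSuggestions(data?.suggestions ?? []);
  } catch (err) {
    if (attempt < 1) {
      // one retry after a short backoff covers transient network errors
      setTimeout(() => generateSuggestions(attempt + 1), 1500);
      return;
    }
    console.error('Failed to generate suggestions:', err);
    failedPromptsRef.current.add(userPrompt); // persist the failure state
    setSuggestions([]);
  }
};
```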


generateSuggestions();
}, [isLoading, lastUserMessage, messages]);

const handleSuggestionSelect = useCallback(
(suggestion: string) => {
sendMessage({
text: suggestion,
model: model,
});
},
[model, sendMessage],
);

// Get the current version number based on assistant messages only
const getCurrentVersion = useCallback(
(index: number) => {
@@ -111,6 +184,11 @@ export function ChatSection({ messages }: ChatSectionProps) {
</div>
</ScrollArea>
<div className="w-full min-w-52 max-w-xl bg-transparent px-4 pb-6">
<SuggestionPills
suggestions={suggestions}
onSelect={handleSuggestionSelect}
disabled={isLoading}
/>
<TextAreaChat
onSubmit={sendMessage}
placeholder="Keep iterating with Adam..."
36 changes: 36 additions & 0 deletions src/components/chat/SuggestionPills.tsx
@@ -0,0 +1,36 @@
import { Button } from '@/components/ui/button';
import { cn } from '@/lib/utils';

interface SuggestionPillsProps {
suggestions: string[];
onSelect: (suggestion: string) => void;
disabled?: boolean;
}

export function SuggestionPills({
disabled,
suggestions,
onSelect,
}: SuggestionPillsProps) {
if (!suggestions.length) return null;

return (
<div className="scrollbar-hide flex gap-2 overflow-x-auto pb-2 [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden">
{suggestions.map((suggestion, index) => (
<Button
Review comment (Contributor) on line 20:

**style:** using array index as `key` can cause rendering issues if suggestions change. use the suggestion text itself as the key since suggestions are unique
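
A sketch of the suggested change, not part of the diff: the suggestion string itself works as the key because the edge function dedupes results with a `Set` before returning them, so values are unique.

```tsx
{suggestions.map((suggestion) => (
  <Button
    key={suggestion} // unique (the edge function dedupes) and stable across reorders
    variant="outline"
    size="sm"
    className={cn(
      'shrink-0 rounded-full border border-adam-neutral-700 bg-adam-neutral-800 text-xs text-white hover:text-white hover:opacity-80',
      disabled ? 'opacity-50' : '',
    )}
    onClick={() => onSelect(suggestion)}
    disabled={disabled}
  >
    {suggestion}
  </Button>
))}
```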

key={index}
variant="outline"
size="sm"
className={cn(
'shrink-0 rounded-full border border-adam-neutral-700 bg-adam-neutral-800 text-xs text-white hover:text-white hover:opacity-80',
disabled ? 'opacity-50' : '',
)}
onClick={() => onSelect(suggestion)}
disabled={disabled}
>
{suggestion}
</Button>
))}
</div>
);
}
10 changes: 10 additions & 0 deletions supabase/functions/suggestion-generator/deno.json
@@ -0,0 +1,10 @@
{
"imports": {
"@shared/": "../../../shared/"
},
"lint": {
"rules": {
"exclude": ["no-import-prefix", "no-unversioned-import"]
}
}
}
124 changes: 124 additions & 0 deletions supabase/functions/suggestion-generator/index.ts
@@ -0,0 +1,124 @@
import 'jsr:@supabase/functions-js/edge-runtime.d.ts';
import { corsHeaders } from '../_shared/cors.ts';

const OPENROUTER_API_URL = 'https://openrouter.ai/api/v1/chat/completions';
const OPENROUTER_API_KEY = Deno.env.get('OPENROUTER_API_KEY') ?? '';

Deno.serve(async (req) => {
if (req.method === 'OPTIONS') {
return new Response('ok', { headers: corsHeaders });
}

if (req.method !== 'POST') {
return new Response('Method not allowed', { status: 405 });
}
Review comment (Contributor) on lines +8 to +15:

**style:** add function registration to `config.toml` with `verify_jwt = true` per backend architecture guidelines
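
A minimal sketch of the registration this comment asks for; it follows the Supabase CLI's `[functions.<name>]` convention, so check it against the project's existing `config.toml` entries before committing.

```toml
# supabase/config.toml
[functions.suggestion-generator]
verify_jwt = true
```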


try {
const { userPrompt, generatedCode, parameters } = await req.json();

if (!userPrompt) {
return new Response(JSON.stringify({ suggestions: [] }), {
headers: { ...corsHeaders, 'Content-Type': 'application/json' },
});
}

// Build parameter summary for context
const paramSummary = parameters
?.map(
(p: { name: string; value: string | number | boolean }) =>
`${p.name}=${p.value}`,
)
.join(', ');
Review comment (Contributor) on lines 26 to 31:

**style:** `parameters` is typed inline - consider using the `Parameter` type from `@shared/types.ts` for consistency with the rest of the codebase
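
A sketch of the suggested change, assuming `Parameter` is exported from `shared/types.ts` (the `ParametricArtifact` type above references it) and exposes the same `name`/`value` fields as the inline annotation; the `@shared/` prefix resolves through the import map in this function's `deno.json`.

```ts
import type { Parameter } from '@shared/types.ts';

// Build parameter summary for context, using the shared Parameter type
const paramSummary = (parameters as Parameter[] | undefined)
  ?.map((p) => `${p.name}=${p.value}`)
  .join(', ');
```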


const suggestionPrompt = `You are helping a user iterate on a 3D CAD model.
USER REQUEST: "${userPrompt}"
CURRENT PARAMETERS: ${paramSummary || 'none'}
GENERATED CODE:
\`\`\`openscad
${generatedCode?.slice(0, 1500) || 'No code available'}
\`\`\`
Based on the ACTUAL model above, suggest exactly 2 specific improvements the user could make next.
Your suggestions should:
- Reference actual parameters or features in the code (e.g., if there's cup_height, suggest "Taller cup" not generic "Make bigger")
- Be actionable modifications (2-4 words)
- Be different from each other (one could adjust a dimension, another could add a feature)
DO NOT suggest:
- Generic things like "Add more detail" or "Improve design"
- Exporting, rendering, or color changes
- Things already in the model
Return exactly 2 suggestions:
<suggestion>First suggestion</suggestion>
<suggestion>Second suggestion</suggestion>`;

const response = await fetch(OPENROUTER_API_URL, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${OPENROUTER_API_KEY}`,
'HTTP-Referer': 'https://adam-cad.com',
'X-Title': 'Adam CAD',
},
body: JSON.stringify({
model: 'anthropic/claude-3.5-haiku',
max_tokens: 100,
messages: [
{
role: 'user',
content: suggestionPrompt,
},
],
}),
});

if (!response.ok) {
throw new Error(`OpenRouter API error: ${response.statusText}`);
}

const data = await response.json();
let suggestions: string[] = [];

if (data.choices && data.choices[0]?.message?.content) {
const responseText = data.choices[0].message.content;
const suggestionRegex = /<suggestion>(.*?)<\/suggestion>/gi;
const matches = responseText.matchAll(suggestionRegex);

suggestions = Array.from(
new Set(
Array.from(matches)
.map(([, text]) => {
if (!text) return null;
const cleaned = text
.trim()
.replace(/[""'']/g, '')
.replace(/^["']|["']$/g, '')
.trim();
const words = cleaned.split(/\s+/);
if (words.length > 5) return null;
return words
.map(
(w) => w.charAt(0).toUpperCase() + w.slice(1).toLowerCase(),
)
.join(' ');
})
.filter((s): s is string => s !== null && s.length > 0),
),
).slice(0, 2);
}

return new Response(JSON.stringify({ suggestions }), {
headers: { ...corsHeaders, 'Content-Type': 'application/json' },
});
} catch (error) {
console.error('Error generating suggestions:', error);
return new Response(JSON.stringify({ suggestions: [] }), {
headers: { ...corsHeaders, 'Content-Type': 'application/json' },
});
}
});