-
Notifications
You must be signed in to change notification settings - Fork 11
/
Copy path: packager.ts
53 lines (43 loc) · 1.51 KB
/
packager.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import { zodToJsonSchema } from "zod-to-json-schema";
import { HandlerContext, StopEvent, WorkflowEvent } from "@llamaindex/workflow";
import { OpenAI } from "llamaindex";
import { z } from "zod";
import { Context } from "./agent";
/** Workflow event carrying the raw code string to be split into individual files. */
export class PackageEvent extends WorkflowEvent<{ code: string }> {}
// Zod shape describing one extracted file: its path and full contents.
const FileSchema = z.object({
  path: z.string().describe("Path to the filename, e.g., 'app/main.py'"),
  content: z.string().describe("Complete content of the file"),
});

// The packager's output: every file recovered from the input string.
const PackageResultSchema = z.object({
  files: FileSchema.array(),
});

/** Validated result type inferred from the zod schema. */
export type PackageResult = z.infer<typeof PackageResultSchema>;
/**
 * Workflow step that splits one large code string into individual files
 * (path + content) by asking an LLM to re-emit them as JSON conforming to
 * {@link PackageResultSchema}.
 *
 * @param context - Workflow handler context (required by the step signature; unused here).
 * @param ev - Event carrying the raw code string to split into files.
 * @returns A StopEvent wrapping the validated {@link PackageResult}.
 * @throws Error if the LLM reply is not a plain string, is not valid JSON,
 *         or fails schema validation.
 */
export const packager = async (
  context: HandlerContext<Context>,
  ev: PackageEvent,
) => {
  const { code } = ev.data;
  // Use a dedicated LLM in JSON mode so the reply can be parsed directly.
  const llm = new OpenAI({
    model: "gpt-4o",
    additionalChatOptions: { response_format: { type: "json_object" } },
  });
  // Embed the JSON Schema derived from the zod schema in the system prompt
  // so the model knows the exact output shape expected.
  const schema = JSON.stringify(zodToJsonSchema(PackageResultSchema));
  const response = await llm.chat({
    messages: [
      {
        role: "system",
        content: `You are an expert in extracting single files (path and content) from one large string.\n\nGenerate a valid JSON following the given schema below:\n\n${schema}`,
      },
      {
        role: "user",
        content: `Here is the large string: \n------\n${code}\n------`,
      },
    ],
  });
  // MessageContent may be a string or an array of content parts; narrow at
  // runtime instead of using an unchecked `as string` assertion.
  const raw = response.message.content;
  if (typeof raw !== "string") {
    throw new Error("Expected a plain string response from the LLM");
  }
  // Validate the parsed JSON against the schema before handing it back.
  const result = PackageResultSchema.parse(JSON.parse(raw));
  return new StopEvent<PackageResult>(result);
};