sumpin/generate-template.ts

#!/usr/bin/env bun
/*
 * hierarchyGenerator.ts
 * ------------------------------------------------------------
 * A tiny OpenAI-powered helper that turns natural-language prompts
 * into domain hierarchies matching `HierarchyModel` (mobx-state-tree).
 * ------------------------------------------------------------
 * Usage example:
 *   import { generateHierarchy } from "./hierarchyGenerator";
 *   const hierarchy = await generateHierarchy("Create a v2 Healthcare hierarchy for mental health services");
 *
 * The function returns a live MST instance of `HierarchyModel`, so it
 * can be plugged straight into your state tree or persisted as JSON.
 */
import OpenAI from "openai";
import { HierarchyModel } from "./lib/hierarchy-model.ts";
import type { Instance } from "mobx-state-tree";
// ---------------------------------------------------------------------------
// Type Definitions
// ---------------------------------------------------------------------------
/**
 * Shape produced by the LLM and accepted by `HierarchyModel`.
 */
export interface Hierarchy {
  version: "v1" | "v2";
  domain: string;        // e.g. "Finance", "Technology"
  structure: string[];   // ordered list of hierarchy labels
  description: string;   // plain-text description
  commonSkills: string[];
  commonTools: string[];
  examples: string[];
}
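// Illustrative only: a hand-written object (not real model output) showing the
// shape the LLM is asked to produce. Every value below is an assumption made
// for the sake of example, not part of the schema definition.
//
//   const example: Hierarchy = {
//     version: "v2",
//     domain: "Healthcare",
//     structure: ["Sector", "Industry", "Segment", "Specialty", "Service", "Role"],
//     description: "A six-level hierarchy covering mental health services.",
//     commonSkills: ["Clinical assessment", "Care coordination", "Patient privacy"],
//     commonTools: ["EHR systems", "Telehealth platforms", "Scheduling software"],
//     examples: ["Outpatient therapy clinic", "Crisis hotline", "Inpatient psychiatric unit"],
//   };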
// ---------------------------------------------------------------------------
// OpenAI client configuration
// ---------------------------------------------------------------------------
const openai = new OpenAI({
  // Rely on the OPENAI_API_KEY env var, or pass an explicit key here.
  apiKey: process.env.OPENAI_API_KEY,
});
// System prompt used for every request. Keeps the model focused on
// emitting strict JSON with NO extra text.
const SYS_PROMPT = `
You are an API that converts natural-language descriptions into JSON
objects that conform **exactly** to the following TypeScript interface.
Return the JSON only: no markdown, comments, or additional keys.
If a field is missing in the user's request, make a sensible inference.

interface Hierarchy {
  version: "v1" | "v2";     // one of the two schema versions
  domain: string;           // high-level sector name
  structure: string[];      // ordered labels, 4 elements for v1, 6 for v2
  description: string;      // concise explanation of the hierarchy
  commonSkills: string[];   // 3-7 bullet items
  commonTools: string[];    // 3-7 bullet items
  examples: string[];       // 3-7 representative examples
}
`;
// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------
/**
 * Convert a natural-language prompt into a fully-typed `HierarchyModel`.
 *
 * @param nlPrompt Human description, e.g. "Define a v1 hierarchy for legal services"
 * @param model    (optional) Which OpenAI model to use. Defaults to gpt-4o-mini.
 */
export async function generateHierarchy(
  nlPrompt: string,
  model: string = "gpt-4o-mini"
): Promise<Instance<typeof HierarchyModel>> {
  const chat = await openai.chat.completions.create({
    model,
    response_format: { type: "json_object" }, // ask the API for a JSON-only response
    messages: [
      { role: "system", content: SYS_PROMPT },
      { role: "user", content: nlPrompt },
    ],
  });

  // Defensive parsing: in rare cases the assistant may still wrap JSON in text.
  const raw = chat.choices[0]?.message?.content ?? "{}";
  let data: Hierarchy;
  try {
    data = JSON.parse(raw) as Hierarchy;
  } catch {
    // Attempt to salvage JSON embedded in surrounding text.
    const match = raw.match(/\{[\s\S]*\}/);
    if (!match) throw new Error("Failed to parse JSON from LLM response");
    data = JSON.parse(match[0]) as Hierarchy;
  }

  // Validate minimal shape before creating the MST instance.
  if (!data.version || !data.domain || !data.structure) {
    throw new Error("Incomplete hierarchy returned by LLM");
  }

  return HierarchyModel.create(data);
}
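// A minimal consumption sketch (illustrative; assumes OPENAI_API_KEY is set and
// that `HierarchyModel` mirrors the `Hierarchy` interface above):
//
//   import { getSnapshot } from "mobx-state-tree";
//   const h = await generateHierarchy("Define a v1 hierarchy for legal services");
//   console.log(h.domain);         // e.g. "Legal Services"
//   const json = getSnapshot(h);   // plain serialisable snapshot for persistence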
// ---------------------------------------------------------------------------
// Helper: quick command-line demo when this file is run directly with Bun
// ---------------------------------------------------------------------------
// `import.meta.main` is true when Bun executes this file as the entry point.
if (import.meta.main) {
  (async () => {
    const prompt =
      process.argv.slice(2).join(" ") ||
      "Create a v2 Technology hierarchy focused on AI safety";
    try {
      const hierarchy = await generateHierarchy(prompt);
      console.log(JSON.stringify(hierarchy.toJSON(), null, 2));
    } catch (err) {
      console.error("Error generating hierarchy:", err);
      process.exitCode = 1;
    }
  })();
}