- prompt: buildSuggestionsPrompt({ src, prompt, schema }),
+ messages: [buildSuggestionsPrompt({ src, prompt, schema }), '```json\n{"suggestions":['],
system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential. Respond in a json block.`,
mode: "json",
});
const suggestions = getSuggestions({ result: suggestionsResult });
tap({ suggestions });
- const { result: htmlResult, pending: htmlPending } = generateData<{ html: string }>({
- prompt: buildUiPrompt({ prompt, lastSrc }),
+ const { result: htmlResult, pending: htmlPending, partial: partialHtml } = generateData<{ html: string }>({
+ messages: [buildUiPrompt({ prompt, lastSrc }), '```html\n'],
system: viewSystemPrompt({ schema }),
mode: "html",
});
- src = maybeHTML({ result: htmlResult, pending: htmlPending });
+ src = maybeHTML({ result: htmlResult });
+ let preview = maybeSRC({ src, pending: htmlPending, partial: partialHtml });
+ tap({ preview })
let firstSuggestion = getSuggestion({ suggestions, index: 0 });
let secondSuggestion = getSuggestion({ suggestions, index: 1 });
@@ -252,7 +248,7 @@ export const iframe = recipe<{
placeholder="title"
oncommon-input=${updateValue({ value: title })}
>
-
+ View Data
diff --git a/typescript/packages/planning-server/src/conversation.ts b/typescript/packages/planning-server/src/conversation.ts
deleted file mode 100644
-export class InMemoryConversationThreadManager {
- private threads: Map<string, ConversationThread> = new Map();
-
- create(
- system: string,
- initialMessage: string,
- activeTools: CoreTool[]
- ): ConversationThread {
- const id = crypto.randomUUID();
- const thread: ConversationThread = {
- id,
- conversation: [
- {
- role: "user",
- content: [{ type: "text", text: initialMessage }],
- },
- ],
- system,
- activeTools,
- };
- this.threads.set(id, thread);
- return thread;
- }
-
- get(id: string): ConversationThread | undefined {
- return this.threads.get(id);
- }
-
- update(id: string, newMessages: CoreMessage[]): void {
- const thread = this.threads.get(id);
- if (thread) {
- thread.conversation = [...thread.conversation, ...newMessages];
- }
- }
-
- delete(id: string): void {
- this.threads.delete(id);
- }
-}
diff --git a/typescript/packages/planning-server/src/conversation_test.ts b/typescript/packages/planning-server/src/conversation_test.ts
deleted file mode 100644
index 2ea41b4be..000000000
--- a/typescript/packages/planning-server/src/conversation_test.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-import { assertEquals } from "https://deno.land/std/testing/asserts.ts";
-import { InMemoryConversationThreadManager } from "./conversation.ts";
-
-Deno.test("ThreadManager - Create and Get Thread", () => {
- const manager = new InMemoryConversationThreadManager();
- const thread = manager.create("system prompt", "initial message", []);
-
- assertEquals(thread.system, "system prompt");
- assertEquals(thread.conversation.length, 1);
- assertEquals(thread.conversation[0].role, "user");
- assertEquals(thread.conversation[0].content[0].text, "initial message");
-
- const retrievedThread = manager.get(thread.id);
- assertEquals(retrievedThread, thread);
-});
-
-Deno.test("ThreadManager - Update Thread", () => {
- const manager = new InMemoryConversationThreadManager();
- const thread = manager.create("system prompt", "initial message", []);
-
- manager.update(thread.id, [
- {
- role: "assistant",
- content: [{ type: "text", text: "Hello! How can I help you?" }],
- },
- ]);
-
- const updatedThread = manager.get(thread.id);
- assertEquals(updatedThread?.conversation.length, 2);
- assertEquals(updatedThread?.conversation[1].role, "assistant");
- assertEquals(
- updatedThread?.conversation[1].content[0].text,
- "Hello! How can I help you?",
- );
-});
-
-Deno.test("ThreadManager - Set Pending Tool Calls", () => {
- const manager = new InMemoryConversationThreadManager();
- const thread = manager.create("system prompt", "initial message", []);
-
- const toolCalls = [
- { type: "tool_use", tool: { name: "calculator", arguments: "1 + 1" } },
- ];
- manager.setPendingToolCalls(thread.id, toolCalls);
-
- const updatedThread = manager.get(thread.id);
- assertEquals(updatedThread?.pendingToolCalls, toolCalls);
-});
-
-Deno.test("ThreadManager - Delete Thread", () => {
- const manager = new InMemoryConversationThreadManager();
- const thread = manager.create("system prompt", "initial message", []);
-
- manager.delete(thread.id);
-
- const deletedThread = manager.get(thread.id);
- assertEquals(deletedThread, undefined);
-});
diff --git a/typescript/packages/planning-server/src/deps.ts b/typescript/packages/planning-server/src/deps.ts
deleted file mode 100644
index 7d28ce8d0..000000000
--- a/typescript/packages/planning-server/src/deps.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-// deps.ts
-export { default as datascript } from "npm:datascript";
-import { config } from "https://deno.land/x/dotenv/mod.ts";
-export { serve } from "https://deno.land/std@0.140.0/http/server.ts";
-
-export * as ai from "npm:ai";
-export { anthropic } from "npm:@ai-sdk/anthropic";
-
-await config({ export: true });
diff --git a/typescript/packages/planning-server/src/index.ts b/typescript/packages/planning-server/src/index.ts
index 13890e2a7..028882564 100755
--- a/typescript/packages/planning-server/src/index.ts
+++ b/typescript/packages/planning-server/src/index.ts
@@ -1,64 +1,120 @@
-#!/usr/bin/env -S deno run --allow-net --allow-read --allow-env
-
-import { crypto } from "https://deno.land/std/crypto/mod.ts";
+import { serve } from "https://deno.land/std@0.140.0/http/server.ts";
+import { streamText } from "npm:ai";
import { ensureDir } from "https://deno.land/std/fs/mod.ts";
-import { CoreAssistantMessage, CoreMessage, CoreTool } from "npm:ai";
-import { ask } from "./anthropic.ts";
-import {
- ConversationThread,
- InMemoryConversationThreadManager,
-} from "./conversation.ts";
-import { serve } from "./deps.ts";
-
-const threadManager = new InMemoryConversationThreadManager();
+import { crypto } from "https://deno.land/std/crypto/mod.ts";
+import { anthropic } from "npm:@ai-sdk/anthropic";
const CACHE_DIR = "./cache";
-type CreateConversationThreadRequest = {
- action: "create";
- message: string;
- system: string;
- activeTools: CoreTool[];
-};
-
-type AppendToConversationThreadRequest = {
- action: "append";
- threadId: string;
- message?: string;
-};
-
-type ConversationThreadRequest =
- | CreateConversationThreadRequest
- | AppendToConversationThreadRequest;
-
const handler = async (request: Request): Promise<Response> => {
if (request.method === "GET") {
- return new Response("Planning Server", { status: 200 });
- } else if (request.method === "POST") {
+ return new Response("Hello World");
+ }
+
+ if (request.method === "POST") {
try {
- const body: ConversationThreadRequest = await request.json();
- const { action } = body;
+ const payload = await request.json() as {
+ messages: Array<{ role: string; content: string }>;
+ system: string;
+ model: string;
+ max_tokens: number;
+ stream: boolean; // the LLM response is always streamed internally; this flag controls whether we stream to the client
+ };
+
+ const description = JSON.stringify(payload.messages).slice(0, 80);
+
+ const cacheKey = await hashKey(JSON.stringify(payload));
+ const cachedResult = await loadCacheItem(cacheKey);
+ if (cachedResult) {
+ console.log("Cache hit:", description);
+ return new Response(JSON.stringify(cachedResult), {
+ headers: { "Content-Type": "application/json" },
+ });
+ }
- switch (action) {
- case "create": {
- const { message, system, activeTools } = body;
- return handleCreateConversationThread(system, message, activeTools);
- }
- case "append": {
- const { threadId, message } = body;
- return handleAppendToConversationThread(threadId, message);
- }
- default:
- return new Response(JSON.stringify({ error: "Invalid action" }), {
- status: 400,
+ console.log("Cache miss:", description);
+
+ let messages = payload.messages;
+
+ let params = {
+ model: payload.model,
+ system: payload.system,
+ messages,
+ }
+
+ const { textStream } = await streamText({
+ ...params,
+ model: anthropic(payload.model)
+ });
+
+ let result = "";
+
+ if (messages[messages.length - 1].role === "assistant") {
+ result = messages[messages.length - 1].content;
+ }
+
+ if (payload.stream) {
+ const stream = new ReadableStream({
+ async start(controller) {
+ if (messages[messages.length - 1].role === "assistant") {
+ controller.enqueue(new TextEncoder().encode(result + '\n'));
+ }
+ for await (const delta of textStream) {
+ result += delta;
+ controller.enqueue(new TextEncoder().encode(delta + '\n'));
+ }
+
+ if (messages[messages.length - 1].role === "user") {
+ messages.push({ role: "assistant", content: result });
+ } else {
+ messages[messages.length - 1].content = result;
+ }
+ await saveCacheItem(cacheKey, params); // after finishing, save!
+ controller.close();
+ },
+ });
+
+ return new Response(stream, {
+ headers: {
+ "Content-Type": "text/event-stream",
+ "Transfer-Encoding": "chunked"
+ },
+ });
+ }
+
+ for await (const delta of textStream) {
+ result += delta;
+ }
+
+ if (!result) {
+ return new Response(
+ JSON.stringify({ error: "No response from LLM" }),
+ {
+ status: 500,
headers: { "Content-Type": "application/json" },
- });
+ }
+ );
}
- } catch (error) {
- return new Response(JSON.stringify({ error: error.message }), {
- status: 400,
+
+ if (messages[messages.length - 1].role === "user") {
+ messages.push({ role: "assistant", content: result });
+ } else {
+ messages[messages.length - 1].content = result;
+ }
+
+ await saveCacheItem(cacheKey, params);
+
+ return new Response(JSON.stringify(params), {
headers: { "Content-Type": "application/json" },
});
+ } catch (error) {
+ return new Response(
+ JSON.stringify({ error: (error as Error).message }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
}
} else {
return new Response("Please send a POST request", { status: 405 });
@@ -92,134 +148,6 @@ async function saveCacheItem(key: string, data: any): Promise {
await Deno.writeTextFile(filePath, JSON.stringify(data, null, 2));
}
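`hashKey` is called above but its body is outside this hunk; a sketch of an implementation consistent with the std `crypto` import, producing a hex digest usable as a cache filename (an assumption, not the patch's actual code):

```ts
async function hashKey(key: string): Promise<string> {
  // SHA-256 the cache key and hex-encode it so it is safe to use as a filename.
  const digest = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(key));
  return Array.from(new Uint8Array(digest))
    .map((b) => b.toString(16).padStart(2, "0"))
    .join("");
}
```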
-async function handleCreateConversationThread(
- system: string,
- message: string,
- activeTools: CoreTool[]
-): Promise<Response> {
- const cacheKey = `${system}:${message}`;
-
- const cachedResult = await loadCacheItem(cacheKey);
- if (cachedResult) {
- console.log(
- "Cache hit!",
- (cacheKey.slice(0, 20) + "..." + cacheKey.slice(-20)).replaceAll("\n", "")
- );
- return new Response(JSON.stringify(cachedResult), {
- headers: { "Content-Type": "application/json" },
- });
- }
-
- const thread = threadManager.create(system, message, activeTools);
- const result = await processConversationThread(thread);
- if (result.type === "error") {
- return new Response(JSON.stringify(result), {
- status: 400,
- headers: { "Content-Type": "application/json" },
- });
- }
-
- if (result.assistantResponse) {
- threadManager.update(thread.id, [result.assistantResponse]);
- }
-
- await saveCacheItem(cacheKey, result);
-
- return new Response(JSON.stringify(result), {
- headers: { "Content-Type": "application/json" },
- });
-}
-
-async function handleAppendToConversationThread(
- threadId: string,
- message?: string
-): Promise<Response> {
- const thread = threadManager.get(threadId);
- if (!thread) {
- return new Response(JSON.stringify({ error: "Thread not found" }), {
- status: 404,
- headers: { "Content-Type": "application/json" },
- });
- }
-
- if (message) {
- threadManager.update(threadId, [
- {
- role: "user",
- content: message,
- },
- ]);
- }
-
- const result = await processConversationThread(thread);
- if (result.type === "error") {
- return new Response(JSON.stringify(result), {
- status: 400,
- headers: { "Content-Type": "application/json" },
- });
- }
-
- // Update the thread with the assistant's response
- if (result.assistantResponse) {
- threadManager.update(threadId, [result.assistantResponse]);
- }
-
- // Remove the assistantResponse from the result before sending it to the client
- const { assistantResponse, ...responseToClient } = result;
-
- return new Response(JSON.stringify(responseToClient), {
- headers: { "Content-Type": "application/json" },
- });
-}
-
-type ProcessConversationThreadResult =
- | {
- type: "success";
- threadId: string;
- output: string;
- assistantResponse: CoreAssistantMessage;
- conversation: CoreMessage[];
- }
- | { type: "error"; error: string };
-
-async function processConversationThread(
- thread: ConversationThread
-): Promise<ProcessConversationThreadResult> {
- console.log("Thread", thread);
-
- const result = await ask(
- thread.conversation,
- thread.system,
- thread.activeTools
- );
- if (!result) {
- return { type: "error", error: "No response from Anthropic" };
- }
-
- // Find the new assistant's response (it should be the last message)
- const assistantResponse = result[result.length - 1];
- if (assistantResponse.role !== "assistant") {
- return { type: "error", error: "No assistant response found" };
- }
-
- if (Array.isArray(assistantResponse.content)) {
- assistantResponse.content = assistantResponse.content
- .filter((msg) => msg.type == "text")
- .map((msg) => msg.text)
- .join(" ");
- }
-
- const output = assistantResponse.content;
- console.log("Output=", output);
- return {
- type: "success",
- threadId: thread.id,
- output,
- assistantResponse,
- conversation: result,
- };
-}
-
const port = Deno.env.get("PORT") || "8000";
console.log(`HTTP webserver running. Access it at: http://localhost:${port}/`);
await serve(handler, { port: parseInt(port) });
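For reference, a minimal sketch of consuming this endpoint from a client; it assumes the server above is reachable on localhost:8000 and that, with `stream: true`, it emits plain text chunks terminated by newlines, as the handler does:

```ts
async function readStream(messages: Array<{ role: string; content: string }>, system: string) {
  const response = await fetch("http://localhost:8000/", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages, system, model: "claude-3-5-sonnet-20240620", max_tokens: 4096, stream: true }),
  });
  if (!response.body) throw new Error("No response body");

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let text = "";
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true }); // chunks arrive newline-terminated
  }
  return text;
}
```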
From cae1d28b31eeaae9e3ad625fa766197b8450daf2 Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Fri, 4 Oct 2024 16:12:32 -0400
Subject: [PATCH 02/19] more progress cleaning up streaming
---
.../src/builtins/generate-data.ts | 107 ++++-------
.../packages/common-runner/src/llm-client.ts | 41 ++--
.../packages/llm-client/src/dummy-data.ts | 28 ---
typescript/packages/llm-client/src/index.ts | 180 +++++++-----------
.../src/recipes/annotation.ts | 32 ++--
.../src/recipes/dataDesigner.ts | 6 +-
.../src/recipes/iframe.ts | 29 ++-
.../src/recipes/luft-bnb-search.ts | 4 +-
.../lookslike-high-level/src/recipes/wiki.ts | 9 +-
.../packages/planning-server/src/index.ts | 16 +-
10 files changed, 186 insertions(+), 266 deletions(-)
delete mode 100644 typescript/packages/llm-client/src/dummy-data.ts
diff --git a/typescript/packages/common-runner/src/builtins/generate-data.ts b/typescript/packages/common-runner/src/builtins/generate-data.ts
index 7a76ebb75..43d541c11 100644
--- a/typescript/packages/common-runner/src/builtins/generate-data.ts
+++ b/typescript/packages/common-runner/src/builtins/generate-data.ts
@@ -2,9 +2,9 @@ import { type Node } from "@commontools/common-builder";
import { cell, CellImpl, ReactivityLog } from "../cell.js";
import { sendValueToBinding, findAllAliasedCells } from "../utils.js";
import { schedule, Action } from "../scheduler.js";
-import { generateData as generateDataClient } from "@commontools/llm-client";
+import { SimpleMessage } from "@commontools/llm-client";
import { mapBindingsToCell, normalizeToCells } from "../utils.js";
-import { mockResultClient } from "../llm-client.js";
+import { makeClient, dataRequest } from "../llm-client.js";
// TODO: generateData should really be a recipe, not a builtin, and either the
// underlying llm client call or even just fetch the built-in.
@@ -77,77 +77,48 @@ export function generateData(
fullResult.setAtPath([], undefined, log);
partialResult.setAtPath([], undefined, log);
- let resultPromise: Promise;
- let fullMessages = messages?.map((message, index) => ({ role: index % 2 === 0 ? "user" : "assistant", content: message }))
-
- if (system) {
- resultPromise = fetch("/api/llm", {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- },
- body: JSON.stringify({
- messages: fullMessages,
- system,
- model: "claude-3-5-sonnet-20240620",
- max_tokens: 4096,
- stream: true
- })
- }).then(async (response) => {
- if (!response.body) {
- throw new Error("No response body");
- }
-
- // if json, just return the response
- if (response.headers.get("content-type") === "application/json") {
- return response.json().then((data) => {
- let messages = data['messages']
- let lastMessage = messages[messages.length - 1]
- console.log("lastMessage", lastMessage)
- return grab(lastMessage['content'])
- });
- }
-
- const reader = response.body.getReader();
- const decoder = new TextDecoder();
- let doneReading = false;
- let partialText = "";
-
- while (!doneReading) {
- const { value, done } = await reader.read();
- doneReading = done;
- if (value) {
- const chunk = decoder.decode(value, { stream: true });
- partialText += chunk.slice(0, -1); // remove the \n
- partialResult.setAtPath([], partialText, log);
- }
- }
-
- return grab(partialText);
- }).catch((err) => {
- console.error(err);
- pending.setAtPath([], false, log);
- });
- } else {
- resultPromise = generateDataClient(
- mockResultClient,
- messages[messages.length - 1],
- result,
- schema
- );
- }
+ let fullMessages: SimpleMessage[] = (messages || []).map(
+ (message, index) => ({ role: index % 2 === 0 ? "user" : "assistant", content: message })
+ );
+
+ // FIXME(ja): lack of system prompt => "create system prompt about json":
+ let effectiveSystem = system || dataRequest({
+ description: "hmm",
+ inputData: result,
+ jsonSchema: schema,
+ })
+
+ const updatePartial = (t: string) => partialResult.setAtPath([], t, log);
+
+ let resultPromise = makeClient().sendRequest({
+ messages: fullMessages,
+ system: effectiveSystem,
+ model: "claude-3-5-sonnet-20240620",
+ max_tokens: 4096,
+ }, updatePartial)
const thisRun = ++currentRun;
- resultPromise.then((result) => {
- if (thisRun !== currentRun) return;
+ resultPromise
+ .then((a: SimpleMessage) => {
+ console.log("result", a);
+ return a;
+ })
+ .then((assistant: SimpleMessage) => grab(assistant.content))
+ .then((result) => {
+ if (thisRun !== currentRun) return;
- normalizeToCells(result, undefined, log);
+ normalizeToCells(result, undefined, log);
- pending.setAtPath([], false, log);
- fullResult.setAtPath([], result, log);
- partialResult.setAtPath([], result, log);
- });
+ pending.setAtPath([], false, log);
+ fullResult.setAtPath([], result, log);
+ partialResult.setAtPath([], result, log);
+ }).catch((error) => {
+ console.error("Error generating data", error);
+ pending.setAtPath([], false, log);
+ fullResult.setAtPath([], undefined, log);
+ partialResult.setAtPath([], undefined, log);
+ });
};
schedule(startGeneration, {
diff --git a/typescript/packages/common-runner/src/llm-client.ts b/typescript/packages/common-runner/src/llm-client.ts
index d25fd17bd..6ca4306eb 100644
--- a/typescript/packages/common-runner/src/llm-client.ts
+++ b/typescript/packages/common-runner/src/llm-client.ts
@@ -1,23 +1,26 @@
import { LLMClient } from "@commontools/llm-client";
-export const LLM_SERVER_URL =
- window.location.protocol + "//" + window.location.host + "/api/llm";
-export const suggestionClient = new LLMClient({
- serverUrl: LLM_SERVER_URL,
- system:
- "You are an assistant that helps match user queries to relevant data gems based on their names and types.",
- tools: [],
-});
+export const suggestSystem = "You are an assistant that helps match user queries to relevant data gems based on their names and types."
+export const jsonDataRequest = `Generate dummy data as JSON as per the provided spec. Use the input to imagine what an API response would look like for a request.`
-export const mockResultClient = new LLMClient({
- serverUrl: LLM_SERVER_URL,
- system: `Generate dummy data as JSON as per the provided spec. Use the input to imagine what an API response would look like for a request.`,
- tools: [],
-});
+export const LLM_SERVER_URL = window.location.protocol + "//" + window.location.host + "/api/llm";
+export const makeClient = () => new LLMClient(LLM_SERVER_URL);
-export const makeClient = (system: string) =>
- new LLMClient({
- serverUrl: LLM_SERVER_URL,
- system,
- tools: [],
- });
+export function dataRequest({
+ description,
+ inputData,
+ jsonSchema,
+}: {
+ description: string;
+ inputData: any;
+ jsonSchema: any;
+}) {
+ return `You specialize in generating believable and useful data for testing applications during development. Take the provided input parameters and use them to hallucinate a plausible result that conforms to the following JSON schema:
+
+ ${JSON.stringify(jsonSchema)}
+
+ ${description}
+ ${JSON.stringify(inputData)}
+
+ Respond with only the generated data in a JSON block.`;
+}
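A sketch of how these two exports are meant to be combined, mirroring the generate-data.ts change above; the schema, input data, and callback here are illustrative placeholders:

```ts
const system = dataRequest({
  description: "A plausible weather API response for the request below",
  inputData: { city: "Lisbon" },
  jsonSchema: { type: "object", properties: { temperature: { type: "number" } } },
});

const assistant = await makeClient().sendRequest(
  {
    messages: [{ role: "user", content: "current weather" }],
    system,
    model: "claude-3-5-sonnet-20240620",
    max_tokens: 4096,
  },
  (partial) => console.log("streamed so far:", partial.length), // passing a callback enables streaming
);
console.log(assistant.content);
```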
diff --git a/typescript/packages/llm-client/src/dummy-data.ts b/typescript/packages/llm-client/src/dummy-data.ts
deleted file mode 100644
index 355fa7894..000000000
--- a/typescript/packages/llm-client/src/dummy-data.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { Schema } from "@cfworker/json-schema";
-import { LLMClient } from "./index.js";
-
-export function grabJson(txt: string) {
- return JSON.parse(txt.match(/```json\n([\s\S]+?)```/)?.[1] ?? "{}");
-}
-
-export async function generateData<T>(
- client: LLMClient,
- description: string,
- inputData: any,
- jsonSchema: Schema,
-) {
- const request = `
- You specialize in generating believable and useful data for testing applications during development. Take the provided input parameters and use them to hallucinate a plausible result that conforms to the following JSON schema:
-
- ${JSON.stringify(jsonSchema)}
-
- ${description}
- ${JSON.stringify(inputData)}
-
- Respond with only the generated data in a JSON block.`;
-
- const thread = await client.createThread(request);
- const response = thread.conversation[thread.conversation.length - 1];
-
- return grabJson(response) as T;
-}
diff --git a/typescript/packages/llm-client/src/index.ts b/typescript/packages/llm-client/src/index.ts
index 79d17350d..3b41262a1 100644
--- a/typescript/packages/llm-client/src/index.ts
+++ b/typescript/packages/llm-client/src/index.ts
@@ -1,59 +1,33 @@
-import { CoreMessage, CoreTool } from "ai";
-export * from "./dummy-data.js";
-
-export type LlmTool = CoreTool & {
- implementation: (input: any) => Promise | string;
-};
-
-export interface ClientConfig {
- serverUrl: string;
- tools: LlmTool[];
- system?: string;
+export type SimpleMessage = {
+ role: "user" | "assistant",
+ content: string,
}
-export class ConversationThread {
- constructor(
- private client: LLMClient,
- public id: string,
- public conversation: string[] = [],
- ) {
- this.client = client;
- this.id = id;
- this.conversation = conversation;
- }
-
- async sendMessage(message: string): Promise {
- const response: AppendThreadResponse = await this.client.continueThread(
- this.id,
- message,
- );
-
- this.conversation.push(`User: ${message}`);
- let assistantResponse = response.assistantResponse;
- this.conversation.push(`Assistant: ${assistantResponse.content}`);
-
- return response.output;
- }
+type LLMRequest = {
+ messages: SimpleMessage[],
+ system: string,
+ model: string,
+ max_tokens: number,
+ stream?: boolean,
}
+
export class LLMClient {
private serverUrl: string;
- private tools: LlmTool[];
- private system: string;
-
- constructor(config: ClientConfig) {
- this.serverUrl = config.serverUrl;
- this.tools = config.tools;
- this.system =
- config.system ||
- "You are a helpful assistant that uses the provided tools to create effect.";
+
+ constructor(serverUrl: string) {
+ this.serverUrl = serverUrl;
}
- private async sendRequest(body: any): Promise {
+ async sendRequest(userRequest: LLMRequest, partialCB?: (text: string) => void): Promise<SimpleMessage> {
+ const fullRequest: LLMRequest = {
+ ...userRequest,
+ stream: partialCB ? true : false,
+ }
const response = await fetch(this.serverUrl, {
method: "POST",
headers: { "Content-Type": "application/json" },
- body: JSON.stringify(body),
+ body: JSON.stringify(fullRequest),
});
if (!response.ok) {
@@ -63,74 +37,64 @@ export class LLMClient {
);
}
- return await response.json();
- }
-
- async createThread(message: string): Promise {
- const request: CreateThreadRequest = {
- action: "create",
- system: this.system,
- message,
- activeTools: this.tools.map(({ implementation, ...tool }) => tool),
- };
-
- const response: CreateThreadResponse = await this.sendRequest(request);
- const thread = new ConversationThread(this, response.threadId);
+ if (!response.body) {
+ throw new Error("No response body");
+ }
- const initialAssistantResponse = response.output;
- thread.conversation.push(`User: ${message}`);
- thread.conversation.push(`Assistant: ${initialAssistantResponse}`);
+ // if server responds with json, just return the response
+ if (response.headers.get("content-type") === "application/json") {
+ return response.json() as Promise<SimpleMessage>;
+ }
- return thread;
- }
+ let content = await this.stream(response.body, partialCB);
- async continueThread(
- threadId: string,
- message?: string,
- toolResponses: ToolResponse[] = [],
- ): Promise {
- const request: AppendThreadRequest = {
- action: "append",
- threadId,
- message,
- toolResponses,
- };
-
- return await this.sendRequest(request);
+ return { content, role: "assistant" };
}
-}
-// Types (you can move these to a separate file if desired)
-interface CreateThreadRequest {
- action: "create";
- system: string;
- message: string;
- activeTools: CoreTool[];
-}
-
-interface AppendThreadRequest {
- action: "append";
- threadId: string;
- message?: string;
- toolResponses: ToolResponse[];
-}
-
-interface CreateThreadResponse {
- threadId: string;
- output: string;
- assistantResponse: CoreMessage;
- conversation: CoreMessage[];
-}
+ private async stream(body: ReadableStream, cb?: (partial: string) => void): Promise<string> {
+ const reader = body.getReader();
+ const decoder = new TextDecoder();
+
+ let doneReading = false;
+ let buffer = "";
+ let text = "";
+
+ while (!doneReading) {
+ const { value, done } = await reader.read();
+ doneReading = done;
+ if (value) {
+ const chunk = decoder.decode(value, { stream: true });
+ buffer += chunk;
+
+ let newlineIndex: number;
+ while ((newlineIndex = buffer.indexOf('\n')) >= 0) {
+ const line = buffer.slice(0, newlineIndex).trim();
+ buffer = buffer.slice(newlineIndex + 1);
+
+ if (line) {
+ try {
+ const t = JSON.parse(line);
+ text += t;
+ if (cb) cb(text);
+ } catch (error) {
+ console.error("Failed to parse JSON line:", line, error);
+ }
+ }
+ }
+ }
+ }
-interface AppendThreadResponse {
- threadId: string;
- assistantResponse: CoreMessage;
- output: string;
- conversation: CoreMessage[];
-}
+ // Handle any remaining buffer
+ if (buffer.trim()) {
+ try {
+ const t = JSON.parse(buffer.trim());
+ text += t;
+ if (cb) cb(text);
+ } catch (error) {
+ console.error("Failed to parse final JSON line:", buffer, error);
+ }
+ }
-interface ToolResponse {
- type: "tool_result";
- tool_use_id: string;
- content: { type: "text"; text: string }[];
+ return text;
+ }
}
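A usage sketch for the slimmed-down client. The wire format it assumes is one JSON-encoded string chunk per line, which `stream()` above concatenates; the URL, prompt, and prefill values are illustrative:

```ts
const client = new LLMClient("/api/llm");

const reply = await client.sendRequest(
  {
    // A trailing assistant message acts as a completion prefix ("prefill").
    messages: [
      { role: "user", content: "2 sentence story" },
      { role: "assistant", content: "It was" },
    ],
    system: "You are a helpful assistant that generates text for testing.",
    model: "claude-3-5-sonnet-20240620",
    max_tokens: 4096,
  },
  (text) => console.log("partial:", text), // passing a callback turns on streaming
);
console.log(reply.content);
```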
diff --git a/typescript/packages/lookslike-high-level/src/recipes/annotation.ts b/typescript/packages/lookslike-high-level/src/recipes/annotation.ts
index e82dca439..789d62102 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/annotation.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/annotation.ts
@@ -36,26 +36,33 @@ const getCharmInfo = lift(({ charms }) => {
return charmInfo;
});
-const buildPrompt = lift(
- ({ query, charmInfo }) =>
- `Given the following user query and list of data charms, return the indices of the charms that are most relevant to the query.
+const buildMessages = lift(
+ ({ query, charmInfo }) => {
+
+ return [
+ `Given the following user query and list of data charms, return the indices of the charms that are most relevant to the query.
Consider both the names and types of the charms when making your selection.
Think broadly, e.g. a stay in a hotel could match a charm called "morning routine", as the user would want to pick a hotel that supports their morning routine.
-User query: "${query}"
+
+${query}
+
-Data:
+
${JSON.stringify(charmInfo, null, 2)}
+
Respond with only JSON array of suggestions, e.g.
\`\`\`json
-[{ index: 0, chosen: "work todo list", confidence: 0.9, reason: "the use of the "work projects" implies the user might want a connection to work TODOs" }, { index: 2, chosen: "hobby projects", confidence: 0.5, reason: "projects could be referring to personal projects, hard to tell from context" }, { index: 5, chosen: "suzy collab", reason: "could this be related to Susan? she appears in several project related lists", confidence: 0.33 }]
+[
+ { index: 0, chosen: "work todo list", confidence: 0.9, reason: "the use of 'work projects' implies the user might want a connection to work TODOs" },
+ { index: 2, chosen: "hobby projects", confidence: 0.5, reason: "projects could be referring to personal projects, hard to tell from context" },
+ { index: 5, chosen: "suzy collab", reason: "could this be related to Susan? she appears in several project related lists", confidence: 0.33 }
+]
\`\`\`
-
-notalk;justgo
-`
-);
+`, '```json\n['];
+ });
const filterMatchingCharms = lift<{
matchedIndices: { index: number; confidence: number }[];
@@ -164,9 +171,8 @@ export const annotation = recipe<{
const { result: matchedIndices } = generateData<
{ index: number; confidence: number }[]
>({
- prompt: buildPrompt({ query, charmInfo }),
- system:
- "You are an assistant that helps match user queries to relevant data charms based on their names and types.",
+ messages: buildMessages({ query, charmInfo }),
+ system: "You are an assistant that helps match user queries to relevant data charms based on their names and types.",
});
const matchingCharms = filterMatchingCharms({ matchedIndices, charmInfo });
const suggestion = findSuggestion({ matchingCharms, data });
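The two-element `messages` value doubles as [user prompt, assistant prefill], so the model continues from `'```json\n['` and the final content should carry a fenced JSON block (assuming the model closes the fence). A sketch of pulling the indices back out with the same regex the other recipes in this series use; the sample text stands in for model output:

```ts
const content = '```json\n[{ "index": 0, "confidence": 0.9 }]\n```'; // illustrative model output
const match = content.match(/```json\n([\s\S]+?)```/);
const matchedIndices: { index: number; confidence: number }[] = match ? JSON.parse(match[1]) : [];
console.log(matchedIndices);
```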
diff --git a/typescript/packages/lookslike-high-level/src/recipes/dataDesigner.ts b/typescript/packages/lookslike-high-level/src/recipes/dataDesigner.ts
index 4a458cb1d..7337ca883 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/dataDesigner.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/dataDesigner.ts
@@ -53,13 +53,13 @@ const addToPrompt = handler<
state.query = state.prompt;
});
-const buildJSONGenPrompt = lift(({ prompt, data }) => {
+const buildJSONGenMessages = lift(({ prompt, data }) => {
console.log("prompt", prompt, data);
let fullPrompt = prompt;
if (data) {
fullPrompt += `\n\nHere's the previous JSON for reference:\n\`\`\`json\n${JSON.stringify(data, null, 2)}\n\`\`\``;
}
- return fullPrompt;
+ return [fullPrompt, '```json\n']
});
const onAcceptData = handler(
@@ -86,7 +86,7 @@ export const dataDesigner = recipe<{
tap({ lastData });
const { result } = generateData({
- prompt: buildJSONGenPrompt({ prompt, data: lastData }),
+ messages: buildJSONGenMessages({ prompt, data: lastData }),
system: systemPrompt,
mode: "json",
});
diff --git a/typescript/packages/lookslike-high-level/src/recipes/iframe.ts b/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
index 0edc62c6b..e16a15f3d 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
@@ -44,7 +44,7 @@ const maybeSRC = lift(({ src, pending, partial }) => {
Error generating content
`;
- if (partial) return '<pre>' + partial.replace(/</g, "&lt;").replace(/>/g, "&gt;") + '</pre>';
+ if (partial) return '<pre>' + partial.replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000) + '</pre>';
-const buildUiPrompt = lift(({ prompt, lastSrc }) => {
+const buildUiMessages = lift(({ prompt, lastSrc }) => {
let fullPrompt = prompt;
if (lastSrc) {
fullPrompt += `\n\nHere's the previous HTML for reference:\n\`\`\`html\n${lastSrc}\n\`\`\``;
}
- return fullPrompt;
+ return [fullPrompt, '```html\n']
});
-const buildSuggestionsPrompt = lift(({ src, prompt, schema }) => {
- let fullPrompt = `Given the current prompt: "${prompt}"`;
- fullPrompt += `\n\nGiven the following schema:\n\n${JSON.stringify(schema, null, 2)}\n`;
- if (src) {
- fullPrompt += `\n\nAnd the previous HTML:\n\`\`\`html\n${src}\n\`\`\``;
- }
- fullPrompt += `\n\nSuggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple: these add or change a single feature. Return the suggestions in a JSON block with the following structure:
+const buildSuggestionsMessages = lift(({ src, prompt, schema }) => {
+ if (!src) return;
+
+ let user = `Given the current prompt: "${prompt}"`;
+ user += `\n\nGiven the following schema:\n\n${JSON.stringify(schema, null, 2)}\n`;
+ user += `\n\nAnd the current HTML:\n\`\`\`html\n${src}\n\`\`\``;
+ user += `\n\nSuggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple: these add or change a single feature. Return the suggestions in a JSON block with the following structure:
\`\`\`json
{
"suggestions": [
@@ -184,7 +184,8 @@ const buildSuggestionsPrompt = lift(({ src, prompt, schema }) => {
Do not ever exceed a single sentence. Prefer terse suggestions that take one step.
`;
- return fullPrompt;
+
+ return [user, '```json\n{"suggestions":['];
});
const getSuggestions = lift(({ result }) => result?.suggestions ?? []);
@@ -219,22 +220,20 @@ export const iframe = recipe<{
// });
const { result: suggestionsResult } = generateData<{ suggestions: Suggestion[] }>({
- messages: [buildSuggestionsPrompt({ src, prompt, schema }), '```json\n{"suggestions":['],
+ messages: buildSuggestionsMessages({ src, prompt, schema }),
system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential. Respond in a json block.`,
mode: "json",
});
const suggestions = getSuggestions({ result: suggestionsResult });
- tap({ suggestions });
const { result: htmlResult, pending: htmlPending, partial: partialHtml } = generateData<{ html: string }>({
- messages: [buildUiPrompt({ prompt, lastSrc }), '```html\n'],
+ messages: buildUiMessages({ prompt, lastSrc }),
system: viewSystemPrompt({ schema }),
mode: "html",
});
src = maybeHTML({ result: htmlResult });
let preview = maybeSRC({ src, pending: htmlPending, partial: partialHtml });
- tap({ preview })
let firstSuggestion = getSuggestion({ suggestions, index: 0 });
let secondSuggestion = getSuggestion({ suggestions, index: 1 });
diff --git a/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts b/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts
index 542c12060..11d467030 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts
@@ -76,7 +76,7 @@ const makeBooking = handler<
});
const buildQuery = lift(({ location }) => ({
- prompt: `generate 10 places for private home short-term rentals in ${location}`,
+ messages: [`generate 10 places for private home short-term rentals in ${location}`, '```json\n'],
result: [],
schema: {
type: "array",
@@ -306,7 +306,7 @@ const generateNearbyPlaceQuery = lift(({ routine, places }) => {
}));
return {
- prompt: `generate ${initialData.length} ${locationType} with pun names`,
+ messages: [`generate ${initialData.length} ${locationType} with pun names`, '```json\n'],
initialData,
schema: {
type: "array",
diff --git a/typescript/packages/lookslike-high-level/src/recipes/wiki.ts b/typescript/packages/lookslike-high-level/src/recipes/wiki.ts
index 0bdab9ed2..9ad7e586b 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/wiki.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/wiki.ts
@@ -36,7 +36,8 @@ export const wiki = recipe<{ title: string; canon: string }>(
const {
result: { text, related }, pending,
} = generateData({
- prompt: str`Here are the pages the user has explored so far:\n\n${canon}. Generate a 2 sentence article for the current page of a fictional wiki, plus a list of 5 related pages and 1 page that only partially belongs. The current page is titled: ${title}`,
+ messages: [str`Here are the pages the user has explored so far:\n\n${canon}. Generate a 2 sentence article for the current page of a fictional wiki, plus a list of 5 related pages and 1 page that only partially belongs. The current page is titled: ${title}`,
+ '```json\n'],
schema: {
type: "object",
properties: {
@@ -71,9 +72,9 @@ export const wiki = recipe<{ title: string; canon: string }>(
[UI]: html`
+ `;
+ }
+}
\ No newline at end of file
diff --git a/typescript/packages/common-ui/src/components/index.ts b/typescript/packages/common-ui/src/components/index.ts
index 9fc67b644..b61bc158c 100644
--- a/typescript/packages/common-ui/src/components/index.ts
+++ b/typescript/packages/common-ui/src/components/index.ts
@@ -4,6 +4,7 @@ export * as CommonDict from './common-dict.js';
export * as CommonHstack from './common-hstack.js';
export * as CommonImg from './common-img.js';
export * as CommonInput from './common-input.js';
+export * as CommonInputFile from './common-input-file.js';
export * as CommonMedia from './common-media.js';
export * as CommonNavPanel from './common-navpanel.js';
export * as CommonRecord from './common-record.js';
From 55e82a318fc2676e709634a8a144d0a58893eb8b Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Mon, 7 Oct 2024 09:17:29 -0400
Subject: [PATCH 09/19] chaining lifts for prep/gen/parse
wow, so much nicer
---
.../src/recipes/generator.ts | 61 ++++++++++---------
1 file changed, 33 insertions(+), 28 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/generator.ts b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
index a7c5a4991..c7b87b57d 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/generator.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
@@ -6,16 +6,36 @@ const updateValue = handler<{ detail: { value: string } }, { value: string }>(
({ detail }, state) => { detail?.value && (state.value = detail.value); }
);
-const generateTextMessages = lift(({ textprompt }) => {
- return textprompt && [textprompt, 'It was']
+const prepText = lift(({ prompt }) => {
+ if (prompt) {
+ return {
+ messages: [prompt, 'It was'],
+ system: "You are a helpful assistant that generates text for testing. Respond in text"
+ }
+ }
+ return {};
});
-const generateJSONMessages = lift(({ jsonprompt }) => {
- return jsonprompt && [jsonprompt, '```json\n{']
+const prepJSON = lift(({ prompt }) => {
+ if (prompt) {
+ return {
+ messages: [prompt, '```json\n{'],
+ system: "You are a helpful assistant that generates JSON objects for testing. Respond in JSON",
+ stop: '```'
+ }
+ }
+ return {};
});
-const generateHTMLMessages = lift(({ htmlprompt }) => {
- return htmlprompt && [htmlprompt, '```html\n']
+const prepHTML = lift(({ prompt }) => {
+ if (prompt) {
+ return {
+ messages: [prompt, '```html\n'],
+ system: "You are a helpful assistant that generates HTML for testing. Respond in HTML",
+ stop: '```'
+ }
+ }
+ return {};
});
const grabText = lift(({ result, partial, pending }) => {
@@ -23,24 +43,23 @@ const grabText = lift(({ result, partial, pending }) => {
return partial || ''
}
return result
-})
+})
const grabJson = lift(({ result }) => {
if (!result) {
- return {};
+ return;
}
const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
if (!jsonMatch) {
console.log("No JSON found in text:", result);
- return {};
+ return;
}
return JSON.parse(jsonMatch[1]);
})
const grabHtml = lift(({ result, partial, pending }) => {
- console.log({partial, pending})
if (pending) {
- if (!partial) {
+ if (!partial) {
return ""
}
console.log(partial);
@@ -66,28 +85,14 @@ export const generator = recipe<{ jsonprompt: string; htmlprompt: string; textpr
({ jsonprompt, htmlprompt, textprompt, data }) => {
textprompt.setDefault("2 sentence story");
- const { result: textResult, partial: textPartial, pending: textPending } = generateText({
- messages: generateTextMessages({ textprompt }),
- system: "You are a helpful assistant that generates text for testing. Respond in text"
- });
- const maybeText = grabText({ result: textResult, partial: textPartial, pending: textPending });
+ const maybeText = grabText(generateText(prepText({ prompt: textprompt })))
jsonprompt.setDefault("pet");
- const { result: jsonResult } = generateText({
- messages: generateJSONMessages({ jsonprompt }),
- system: "You are a helpful assistant that generates JSON objects for testing. Respond in JSON",
- stop: '```'
- });
- data = grabJson({ result: jsonResult });
+ data = grabJson(generateText(prepJSON({ prompt: jsonprompt })));
const maybeJSON = jsonify({ data });
htmlprompt.setDefault("simple html about recipes");
- const { result: htmlResult, partial: htmlPartial, pending: htmlPending } = generateText({
- messages: generateHTMLMessages({ htmlprompt }),
- system: "You are a helpful assistant that generates HTML for testing. Respond in HTML",
- stop: '```'
- });
- const maybeHTML = grabHtml({ result: htmlResult, partial: htmlPartial, pending: htmlPending });
+ const maybeHTML = grabHtml(generateText(prepHTML({ prompt: htmlprompt })));
return {
[NAME]: 'data generator',
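The same prep/generate/grab chaining extends to other output formats. A sketch with a hypothetical Markdown mode (not in the patch), assuming the same `lift` and `generateText` imports as generator.ts:

```ts
const prepMarkdown = lift(({ prompt }) => {
  if (!prompt) return {};
  return {
    messages: [prompt, "```markdown\n"],
    system: "You are a helpful assistant that generates Markdown for testing. Respond in Markdown",
    stop: "```",
  };
});

const grabMarkdown = lift(({ result, partial, pending }) => {
  if (pending) return partial || "";
  return result?.match(/```markdown\n([\s\S]+?)```/)?.[1] ?? "";
});

// Inside a recipe body the stages compose just like the text/json/html ones:
// const maybeMarkdown = grabMarkdown(generateText(prepMarkdown({ prompt: mdprompt })));
```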
From 3983104bb35c19f044db623b37ab13c496139ef9 Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Mon, 7 Oct 2024 09:21:46 -0400
Subject: [PATCH 10/19] move the samples nearer to each other
---
.../src/recipes/generator.ts | 60 +++++++++----------
1 file changed, 29 insertions(+), 31 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/generator.ts b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
index c7b87b57d..897131e0d 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/generator.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
@@ -6,6 +6,7 @@ const updateValue = handler<{ detail: { value: string } }, { value: string }>(
({ detail }, state) => { detail?.value && (state.value = detail.value); }
);
+
const prepText = lift(({ prompt }) => {
if (prompt) {
return {
@@ -15,17 +16,13 @@ const prepText = lift(({ prompt }) => {
}
return {};
});
-
-const prepJSON = lift(({ prompt }) => {
- if (prompt) {
- return {
- messages: [prompt, '```json\n{'],
- system: "You are a helpful assistant that generates JSON objects for testing. Respond in JSON",
- stop: '```'
- }
+const grabText = lift(({ result, partial, pending }) => {
+ if (pending) {
+ return partial || ''
}
- return {};
-});
+ return result
+})
+
const prepHTML = lift(({ prompt }) => {
if (prompt) {
@@ -37,32 +34,11 @@ const prepHTML = lift(({ prompt }) => {
}
return {};
});
-
-const grabText = lift(({ result, partial, pending }) => {
- if (pending) {
- return partial || ''
- }
- return result
-})
-
-const grabJson = lift(({ result }) => {
- if (!result) {
- return;
- }
- const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
- if (!jsonMatch) {
- console.log("No JSON found in text:", result);
- return;
- }
- return JSON.parse(jsonMatch[1]);
-})
-
const grabHtml = lift(({ result, partial, pending }) => {
if (pending) {
if (!partial) {
return ""
}
- console.log(partial);
+ return partial.replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000);
}
@@ -78,6 +54,28 @@ const grabHtml = lift(({ result, partial, pending }) => {
return html
});
+
+const prepJSON = lift(({ prompt }) => {
+ if (prompt) {
+ return {
+ messages: [prompt, '```json\n{'],
+ system: "You are a helpful assistant that generates JSON objects for testing. Respond in JSON",
+ stop: '```'
+ }
+ }
+ return {};
+});
+const grabJson = lift(({ result }) => {
+ if (!result) {
+ return;
+ }
+ const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
+ if (!jsonMatch) {
+ console.log("No JSON found in text:", result);
+ return;
+ }
+ return JSON.parse(jsonMatch[1]);
+})
const jsonify = lift(({ data }) => JSON.stringify(data, null, 2))
export const generator = recipe<{ jsonprompt: string; htmlprompt: string; textprompt: string; data: any }>(
From d638a209edbf7b9bfded24a2827f7370ec7304e0 Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Mon, 7 Oct 2024 10:04:17 -0400
Subject: [PATCH 11/19] example recipe using zod for json generation
---
typescript/package-lock.json | 20 ++++++++++++-
.../lookslike-high-level/package.json | 6 ++--
.../src/recipes/generator.ts | 30 +++++++++++++++----
3 files changed, 47 insertions(+), 9 deletions(-)
diff --git a/typescript/package-lock.json b/typescript/package-lock.json
index 776caad3d..a2db56764 100644
--- a/typescript/package-lock.json
+++ b/typescript/package-lock.json
@@ -12058,7 +12058,9 @@
"@commontools/common-runner": "^0.0.1",
"@commontools/common-ui": "^0.0.1",
"@commontools/llm-client": "^0.0.1",
- "@commontools/lookslike-sagas": "^0.0.1"
+ "@commontools/lookslike-sagas": "^0.0.1",
+ "zod": "^3.23.8",
+ "zod-to-json-schema": "^3.23.3"
},
"devDependencies": {
"jsdom": "^25.0.0",
@@ -12068,6 +12070,22 @@
"wireit": "^0.14.4"
}
},
+ "packages/lookslike-high-level/node_modules/zod": {
+ "version": "3.23.8",
+ "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz",
+ "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==",
+ "funding": {
+ "url": "https://github.com/sponsors/colinhacks"
+ }
+ },
+ "packages/lookslike-high-level/node_modules/zod-to-json-schema": {
+ "version": "3.23.3",
+ "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.3.tgz",
+ "integrity": "sha512-TYWChTxKQbRJp5ST22o/Irt9KC5nj7CdBKYB/AosCRdj/wxEMvv4NNaj9XVUHDOIp53ZxArGhnw5HMZziPFjog==",
+ "peerDependencies": {
+ "zod": "^3.23.3"
+ }
+ },
"packages/lookslike-prototype": {
"name": "@commontools/lookslike-prototype",
"version": "0.0.1",
diff --git a/typescript/packages/lookslike-high-level/package.json b/typescript/packages/lookslike-high-level/package.json
index 695dc198f..d69c28f7b 100644
--- a/typescript/packages/lookslike-high-level/package.json
+++ b/typescript/packages/lookslike-high-level/package.json
@@ -45,12 +45,14 @@
},
"dependencies": {
"@commontools/common-builder": "^0.0.1",
- "@commontools/common-runner": "^0.0.1",
"@commontools/common-frp": "^0.0.1",
"@commontools/common-html": "^0.0.1",
+ "@commontools/common-runner": "^0.0.1",
"@commontools/common-ui": "^0.0.1",
"@commontools/llm-client": "^0.0.1",
- "@commontools/lookslike-sagas": "^0.0.1"
+ "@commontools/lookslike-sagas": "^0.0.1",
+ "zod": "^3.23.8",
+ "zod-to-json-schema": "^3.23.3"
},
"overrides": {
"tough-cookie": "^5.0.0-rc.4"
diff --git a/typescript/packages/lookslike-high-level/src/recipes/generator.ts b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
index 897131e0d..e448d01b6 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/generator.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
@@ -1,6 +1,7 @@
import { html } from "@commontools/common-html";
import { recipe, NAME, UI, handler, lift, generateText } from "@commontools/common-builder";
-
+import { z } from 'zod';
+import zodToJsonSchema from 'zod-to-json-schema';
const updateValue = handler<{ detail: { value: string } }, { value: string }>(
({ detail }, state) => { detail?.value && (state.value = detail.value); }
@@ -55,30 +56,47 @@ const grabHtml = lift(({ result, partial, pending }) => {
});
+const Character = z.object({
+ name: z.string(),
+ class: z
+ .string()
+ .describe('Character class, e.g. warrior, mage, or thief.'),
+ description: z.string(),
+});
+type Character = z.infer<typeof Character>;
+
const prepJSON = lift(({ prompt }) => {
+ const jsonSchema = JSON.stringify(zodToJsonSchema(Character), null, 2);
+
if (prompt) {
return {
messages: [prompt, '```json\n{'],
- system: "You are a helpful assistant that generates JSON objects for testing. Respond in JSON",
+ system: `Generate character data inspired by the user description using JSON:\n\n${jsonSchema}`,
stop: '```'
}
}
return {};
});
-const grabJson = lift(({ result }) => {
+const grabJson = lift<{ result: string }, Character | undefined>(({ result }) => {
if (!result) {
return;
}
const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
if (!jsonMatch) {
- console.log("No JSON found in text:", result);
+ console.error("No JSON found in text:", result);
+ return;
+ }
+ let rawData = JSON.parse(jsonMatch[1]);
+ let parsedData = Character.safeParse(rawData);
+ if (!parsedData.success) {
+ console.error("Invalid JSON:", parsedData.error);
return;
}
- return JSON.parse(jsonMatch[1]);
+ return parsedData.data;
})
const jsonify = lift(({ data }) => JSON.stringify(data, null, 2))
-export const generator = recipe<{ jsonprompt: string; htmlprompt: string; textprompt: string; data: any }>(
+export const generator = recipe<{ jsonprompt: string; htmlprompt: string; textprompt: string; data: Character | undefined }>(
"data generator",
({ jsonprompt, htmlprompt, textprompt, data }) => {
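A sketch of the round trip the zod schema enables: extract the fenced JSON, then `safeParse` it so only schema-conforming data reaches the recipe output. The sample string stands in for model output:

```ts
const sample = '```json\n{"name":"Rook","class":"thief","description":"Quiet and quick."}\n```';
const match = sample.match(/```json\n([\s\S]+?)```/);
const checked = Character.safeParse(match ? JSON.parse(match[1]) : {});
if (checked.success) {
  console.log(checked.data.name); // typed as Character
} else {
  console.error(checked.error);
}
```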
From f7dd266b44f0f60178a26a100f11a694efa498f6 Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Mon, 7 Oct 2024 10:11:10 -0400
Subject: [PATCH 12/19] simplify parsing lifts
---
.../src/recipes/generator.ts | 24 +++++++------------
1 file changed, 8 insertions(+), 16 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/generator.ts b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
index e448d01b6..87b7e0c81 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/generator.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
@@ -17,12 +17,7 @@ const prepText = lift(({ prompt }) => {
}
return {};
});
-const grabText = lift(({ result, partial, pending }) => {
- if (pending) {
- return partial || ''
- }
- return result
-})
+const grabText = lift(({ partial }) => { return partial || '' })
const prepHTML = lift(({ prompt }) => {
@@ -35,21 +30,18 @@ const prepHTML = lift(({ prompt }) => {
}
return {};
});
-const grabHtml = lift(({ result, partial, pending }) => {
- if (pending) {
- if (!partial) {
- return ""
- }
- return partial.replace(//g, ">").slice(-1000);
+const grabHtml = lift(({ partial, pending }) => {
+ if (!partial) {
+ return ""
}
- if (!result) {
- return "";
+ if (pending) {
+ return `${partial.slice(-1000).replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000)}`;
}
- const html = result.match(/```html\n([\s\S]+?)```/)?.[1];
+ const html = partial.match(/```html\n([\s\S]+?)```/)?.[1];
if (!html) {
- console.error("No HTML found in text", result);
+ console.error("No HTML found in text", partial);
return "";
}
return html
From b53989b38d39d1093de178547078311d303c860e Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Mon, 7 Oct 2024 12:53:10 -0400
Subject: [PATCH 13/19] iframe uses genText
---
.../src/recipes/iframe.ts | 282 ++++++++++--------
1 file changed, 161 insertions(+), 121 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/iframe.ts b/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
index e16a15f3d..4289d2b1d 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
@@ -4,20 +4,16 @@ import {
UI,
NAME,
lift,
- generateData,
+ generateText,
handler,
str,
cell,
createJsonSchema,
} from "@commontools/common-builder";
+import { z } from "zod";
import { launch } from "../data.js";
-type Suggestion = {
- behaviour: 'append' | 'fork',
- prompt: string,
-}
-
const formatData = lift(({ obj }) => {
console.log("stringify", obj);
return JSON.stringify(obj, null, 2);
@@ -32,85 +28,6 @@ const updateValue = handler<{ detail: { value: string } }, { value: string }>(
({ detail }, state) => detail?.value && (state.value = detail.value),
);
-const maybeHTML = lift(({ result }) => {
- return result?.html || ''
-});
-
-const maybeSRC = lift(({ src, pending, partial }) => {
- if (src) return src;
- if (!pending) return `
-
- ❌
-
Error generating content
-
- `;
- if (partial) return '<pre>' + partial.replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000) + '</pre>';
-});
-
-const viewSystemPrompt = lift(
- ({ schema }) => `generate a complete HTML document within a html block , e.g.
- \`\`\`html
- ...
- \`\`\`
-
- This must be complete HTML.
- Import Tailwind (include \`\`) and style the page using it. Use tasteful, minimal defaults with a consistent style but customize based on the request.
- Import React (include \`
-
-
-
- \`) and write the app using it.
-
- You may not use any other libraries unless requested by the user.
-
- The document can and should make use of postMessage to read and write data from the host context. e.g.
-
- document.addEventListener('DOMContentLoaded', function() {
- console.log('Initialized!');
-
- window.parent.postMessage({
- type: 'subscribe',
- key: 'exampleKey'
- }, '*');
-
- window.addEventListener('message', function(event) {
- if (event.data.type === 'readResponse') {
- // use response
- console.log('readResponse', event.data.key,event.data.value);
- } else if (event.data.type === 'update') {
- // event.data.value is a JSON object already
- // refer to schema for structure
- ...
- });
- });
-
- window.parent.postMessage({
- type: 'write',
- key: 'exampleKey',
- value: 'Example data to write'
- }, '*');
-
- You can subscribe and unsubscribe to changes from the keys:
-
- window.parent.postMessage({
- type: 'subscribe',
- key: 'exampleKey'
- }, '*');
-
- You receive 'update' messages with a 'key' and 'value' field.
-
- window.parent.postMessage({
- type: 'unsubscribe',
- key: 'exampleKey',
- }, '*');
-
-
- ${JSON.stringify(schema, null, 2)}
-
-
- It's best to access and manage each state reference seperately.`,
-);
-
const deriveJsonSchema = lift(({ data, filter }) => {
const schema = createJsonSchema({}, data)?.["properties"];
if (!schema) return {};
@@ -156,44 +73,176 @@ const acceptSuggestion = handler<
}
});
-const buildUiMessages = lift(({ prompt, lastSrc }) => {
- let fullPrompt = prompt;
- if (lastSrc) {
- fullPrompt += `\n\nHere's the previous HTML for reference:\n\`\`\`html\n${lastSrc}\n\`\`\``;
- }
- return [fullPrompt, '```html\n']
+
+const Suggestion = z.object({
+ behaviour: z.enum(['append', 'fork']),
+ prompt: z.string(),
});
+type Suggestion = z.infer<typeof Suggestion>;
-const buildSuggestionsMessages = lift(({ src, prompt, schema }) => {
- if (!src) return;
+const prepSuggestions = lift(({ src, prompt, schema }) => {
+ if (!src) {
+ return {};
+ }
let user = `Given the current prompt: "${prompt}"`;
user += `\n\nGiven the following schema:\n\n${JSON.stringify(schema, null, 2)}\n`;
user += `\n\nAnd the current HTML:\n\`\`\`html\n${src}\n\`\`\``;
- user += `\n\nSuggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple: these add or change a single feature. Return the suggestions in a JSON block with the following structure:
- \`\`\`json
- {
- "suggestions": [
- {
- "behaviour": "append" | "fork",
- "prompt": "string"
- }
- ]
- }
- \`\`\`
+ user += `\n\nSuggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple: these add or change a single feature.
Do not ever exceed a single sentence. Prefer terse suggestions that take one step.
`;
- return [user, '```json\n{"suggestions":['];
+ return {
+ messages: [user, '```json\n{"suggestions":['],
+ system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential. Respond in a json block.
+
+\`\`\`json
+{
+ "suggestions": [
+ {
+ "behaviour": "append" | "fork",
+ "prompt": "string"
+ }
+ ]
+}
+\`\`\``,
+ stop: '```'
+ }
});
-const getSuggestions = lift(({ result }) => result?.suggestions ?? []);
+
+const grabSuggestions = lift<{ result: string }, Suggestion[]>(({ result }) => {
+ if (!result) {
+ return [];
+ }
+ const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
+ if (!jsonMatch) {
+ console.error("No JSON found in text:", result);
+ return [];
+ }
+ let rawData = JSON.parse(jsonMatch[1]);
+ let parsedData = Suggestion.array().safeParse(rawData['suggestions'] || []);
+ if (!parsedData.success) {
+ console.error("Invalid JSON:", parsedData.error);
+ return [];
+ }
+ return parsedData.data;
+})
const getSuggestion = lift(({ suggestions, index }: { suggestions: Suggestion[], index: number }) => {
return suggestions[index] || { behaviour: '', prompt: '' };
});
+const prepHTML = lift(({ prompt, schema, lastSrc }) => {
+ if (!prompt) {
+ return {};
+ }
+
+ let fullPrompt = prompt;
+ if (lastSrc) {
+ fullPrompt += `\n\nHere's the previous HTML for reference:\n\`\`\`html\n${lastSrc}\n\`\`\``;
+ }
+
+ const base = `
+
+
+
+
+
+`
+
+ return {
+ messages: [fullPrompt, '```html\n' + base],
+ stop: '```',
+ system: `generate a complete HTML document within a html block , e.g.
+ \`\`\`html
+ ...
+ \`\`\`
+
+ This must be complete HTML.
+ Import Tailwind and style the page using it. Use tasteful, minimal defaults with a consistent style but customize based on the request.
+ Import React and write the app using it.
+
+ You may not use any other libraries unless requested by the user (in which case, use a CDN to import them)
+
+ The document can and should make use of postMessage to read and write data from the host context. e.g.
+
+ document.addEventListener('DOMContentLoaded', function() {
+ console.log('Initialized!');
+
+ window.parent.postMessage({
+ type: 'subscribe',
+ key: 'exampleKey'
+ }, '*');
+
+ window.addEventListener('message', function(event) {
+ if (event.data.type === 'readResponse') {
+ // use response
+ console.log('readResponse', event.data.key,event.data.value);
+ } else if (event.data.type === 'update') {
+ // event.data.value is a JSON object already
+ // refer to schema for structure
+ ...
+ });
+ });
+
+ window.parent.postMessage({
+ type: 'write',
+ key: 'exampleKey',
+ value: 'Example data to write'
+ }, '*');
+
+ You can subscribe and unsubscribe to changes from the keys:
+
+ window.parent.postMessage({
+ type: 'subscribe',
+ key: 'exampleKey'
+ }, '*');
+
+ You receive 'update' messages with a 'key' and 'value' field.
+
+ window.parent.postMessage({
+ type: 'unsubscribe',
+ key: 'exampleKey',
+ }, '*');
+
+
+ ${JSON.stringify(schema, null, 2)}
+
+
+  It's best to access and manage each state reference separately.`
+ }
+});
+
+const grabHTML = lift<{ pending: boolean, partial: string }, string>(({ pending, partial }) => {
+ if (pending || !partial) {
+ return '';
+ }
+ const html = partial.match(/```html\n([\s\S]+?)```/)?.[1];
+ if (!html) {
+ console.error("No HTML found in text", partial);
+ return '';
+ }
+ return html
+});
+
+const genHTMLView = lift(({ pending, partial }) => {
+ if (!partial) {
+ return "";
+ }
+ if (pending) {
+    return `${partial.slice(-1000).replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000)}`;
+ }
+ const html = partial.match(/```html\n([\s\S]+?)```/)?.[1];
+ if (!html) {
+ console.error("No HTML found in text", partial);
+ return `
error generating html...
${partial.replace(/>/g, "&gt;")}
`;
+ }
+ return html
+});
+
+
export const iframe = recipe<{
title: string;
prompt: string;
@@ -219,21 +268,12 @@ export const iframe = recipe<{
// mode: "json",
// });
- const { result: suggestionsResult } = generateData<{ suggestions: Suggestion[] }>({
- messages: buildSuggestionsMessages({ src, prompt, schema }),
- system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential. Respond in a json block.`,
- mode: "json",
- });
- const suggestions = getSuggestions({ result: suggestionsResult });
-
- const { result: htmlResult, pending: htmlPending, partial: partialHtml } = generateData<{ html: string }>({
- messages: buildUiMessages({ prompt, lastSrc }),
- system: viewSystemPrompt({ schema }),
- mode: "html",
- });
+ const suggestions = grabSuggestions(generateText(prepSuggestions({ src, prompt, schema })));
- src = maybeHTML({ result: htmlResult });
- let preview = maybeSRC({ src, pending: htmlPending, partial: partialHtml });
+ // this html is a bit of a mess as changing src triggers suggestions and view (showing streaming)
+ const { pending: pendingHTML, partial: partialHTML } = generateText(prepHTML({ prompt, schema, lastSrc }));
+ src = grabHTML({ pending: pendingHTML, partial: partialHTML });
+ const viewsrc = genHTMLView({ pending: pendingHTML, partial: partialHTML });
let firstSuggestion = getSuggestion({ suggestions, index: 0 });
let secondSuggestion = getSuggestion({ suggestions, index: 1 });
@@ -247,7 +287,7 @@ export const iframe = recipe<{
placeholder="title"
oncommon-input=${updateValue({ value: title })}
>
-
+ View Data
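
The system prompt added above describes a small postMessage protocol for the generated iframe: the guest sends 'subscribe', 'unsubscribe', and 'write' messages and receives 'update' (and 'readResponse') messages back. The host side is not part of this patch; the following is a minimal sketch of what a compatible parent handler might look like, with the Map-based storage, the '*' target origin, and the omission of the read/readResponse path all being illustrative assumptions.

```ts
// Hypothetical host-side counterpart to the iframe protocol described in the
// system prompt above. Storage, subscription tracking, and the '*' target
// origin are illustrative choices, not code from the patch.
const store = new Map<string, unknown>();
const subscribers = new Map<string, Set<Window>>();

window.addEventListener("message", (event: MessageEvent) => {
  const { type, key, value } = (event.data ?? {}) as {
    type?: string;
    key?: string;
    value?: unknown;
  };
  const source = event.source as Window | null;
  if (!type || !key || !source) return;

  if (type === "subscribe") {
    if (!subscribers.has(key)) subscribers.set(key, new Set());
    subscribers.get(key)!.add(source);
    // Reply immediately with the current value as an 'update'.
    source.postMessage({ type: "update", key, value: store.get(key) }, "*");
  } else if (type === "unsubscribe") {
    subscribers.get(key)?.delete(source);
  } else if (type === "write") {
    store.set(key, value);
    for (const win of subscribers.get(key) ?? []) {
      win.postMessage({ type: "update", key, value }, "*");
    }
  }
});
```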
Date: Mon, 7 Oct 2024 17:40:25 -0400
Subject: [PATCH 14/19] prompt uses genText
---
.../src/recipes/prompts.ts | 75 ++++++++++++-------
1 file changed, 46 insertions(+), 29 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/prompts.ts b/typescript/packages/lookslike-high-level/src/recipes/prompts.ts
index 43571d652..a8a182275 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/prompts.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/prompts.ts
@@ -2,13 +2,20 @@ import { html } from "@commontools/common-html";
import {
recipe,
lift,
- generateData,
+ generateText,
handler,
NAME,
UI,
- str,
} from "@commontools/common-builder";
import { launch } from '../data.js';
+import { z } from 'zod';
+import zodToJsonSchema from 'zod-to-json-schema';
+
+const Prompt = z.object({
+ prompt: z.string().describe('Image generation prompt'),
+});
+type Prompt = z.infer<typeof Prompt>;
+const jsonSchema = JSON.stringify(zodToJsonSchema(Prompt), null, 2);
const imageUrl = lift(
({ title }) => `https://ct-img.m4ke.workers.dev/?prompt=${encodeURIComponent(title)}`
@@ -22,41 +29,51 @@ const updateTitle = handler<{ detail: { value: string } }, { title: string }>(
({ detail }, state) => { (state.title = detail?.value ?? "untitled") }
);
-const maybeList = lift(({ result }) => result || []);
+const grabPrompts = lift<{ result: string }, Prompt[]>(({ result }) => {
+ console.log("grabPrompts", result);
+ if (!result) {
+ return [];
+ }
+ const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
+ if (!jsonMatch) {
+ console.error("No JSON found in text:", result);
+ return [];
+ }
+ let rawData = JSON.parse(jsonMatch[1]);
+ let parsedData = z.array(Prompt).safeParse(rawData);
+ if (!parsedData.success) {
+ console.error("Invalid JSON:", parsedData.error);
+ return [];
+ }
+ return parsedData.data;
+});
+
+const buildPrompt = lift<{ title: string }, { messages: string[], system: string, stop: string } | {}>(({ title }) => {
+ if (!title) return;
-// FIXME(ja): if type Prompt is just a string, the render map fails
-type Prompt = {
- prompt: string;
-}
+ return {
+ system: `Generate 10 image prompt variations when a user sends you a prompt.
+Some should change just the style, some should change the content,
+and some should change both. The last should be a completely different prompt.
+
+${jsonSchema}`,
+ messages: [`Generate image prompt variations for: ${title}`, '```json\n['],
+ stop: '```'
+ }
+});
const addToPrompt = handler<
- { prompt: string },
- { title: string }
+ { prompt: string },
+ { title: string }
>((e, state) => {
- state.title += " " + e.prompt;
+ state.title += " " + e.prompt;
});
export const prompt = recipe<{ title: string }>("prompt", ({ title }) => {
title.setDefault("abstract geometric art");
- const { result } = generateData({
- prompt: str`generate 10 image prompt variations for the current prompt: ${title}. Some should change just the style, some should change the content, and some should change both. The last should be a completely different prompt.`,
- result: [],
- schema: {
- type: "array",
- items: {
- type: "object",
- properties: {
- prompt: {
- type: "string",
- },
- },
- },
- },
- mode: "json",
- });
+ const variations = grabPrompts(generateText(buildPrompt({ title })));
let src = imageUrl({ title });
- let variations = maybeList({result});
return {
[NAME]: title,
@@ -67,10 +84,10 @@ export const prompt = recipe<{ title: string }>("prompt", ({ title }) => {
oncommon-input=${updateTitle({ title })}
>
-
${variations.map(({ prompt }) => html`
${prompt} - ⏩
`)}
+
${variations.map(({ prompt }) => html`
${prompt}
`)}
`,
title,
- variations: result,
+ variations,
addToPrompt: addToPrompt({ title }),
};
});
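
The two-element messages array that buildPrompt returns appears to rely on a convention of the llm client: plain strings alternate user and assistant turns, so the second entry (the opening of the json fence) acts as a pre-filled assistant turn the model must continue, and stop: '```' ends generation at the closing fence. That convention is an assumption here, since the client itself is not shown; the sketch below uses a local ChatMessage type, not the client's real SimpleMessage.

```ts
// Assumed mapping from plain-string messages to role-tagged chat messages:
// even indices are user turns, odd indices are pre-filled assistant turns.
// ChatMessage is a local illustration, not the llm client's actual type.
type ChatMessage = { role: "user" | "assistant"; content: string };

const toChatMessages = (texts: string[]): ChatMessage[] =>
  texts.map((content, i) => ({
    role: i % 2 === 0 ? "user" : "assistant",
    content,
  }));

console.log(
  toChatMessages([
    "Generate image prompt variations for: abstract geometric art",
    '```json\n[',
  ]),
);
// => a user turn followed by an assistant turn that already opens the fence,
//    so the model's continuation is the JSON array body.
```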
From 902c2527d1906431b89dc6cca99714c4fd4ccb8c Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Mon, 7 Oct 2024 17:41:15 -0400
Subject: [PATCH 15/19] iframe uses generateText
---
.../src/recipes/iframe.ts | 53 +++++++++----------
1 file changed, 26 insertions(+), 27 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/iframe.ts b/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
index 4289d2b1d..c550fb1b1 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/iframe.ts
@@ -4,6 +4,7 @@ import {
UI,
NAME,
lift,
+ ifElse,
generateText,
handler,
str,
@@ -85,16 +86,23 @@ const prepSuggestions = lift(({ src, prompt, schema }) => {
return {};
}
- let user = `Given the current prompt: "${prompt}"`;
- user += `\n\nGiven the following schema:\n\n${JSON.stringify(schema, null, 2)}\n`;
- user += `\n\nAnd the current HTML:\n\`\`\`html\n${src}\n\`\`\``;
- user += `\n\nSuggest 3 prompts to enhancem, refine or branch off into a new UI. Keep it simple these add or change a single feature.
+ let instructions = `Given the current prompt: "${prompt}"
- Do not ever exceed a single sentence. Prefer terse, suggestions that take one step.
- `;
+Given the following schema:
+
+${JSON.stringify(schema, null, 2)}
+
+
+And the current HTML:
+
+${src}
+
+Suggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple these add or change a single feature.
+
+Do not ever exceed a single sentence. Prefer terse, suggestions that take one step.`;
return {
- messages: [user, '```json\n{"suggestions":['],
+ messages: [instructions, '```json\n{"suggestions":['],
system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential. Respond in a json block.
\`\`\`json
@@ -215,31 +223,23 @@ const prepHTML = lift(({ prompt, schema, lastSrc }) => {
}
});
-const grabHTML = lift<{ pending: boolean, partial: string }, string>(({ pending, partial }) => {
- if (pending || !partial) {
+const grabHTML = lift<{ result: string }, string | undefined>(({ result }) => {
+ if (!result) {
return '';
}
- const html = partial.match(/```html\n([\s\S]+?)```/)?.[1];
+ const html = result.match(/```html\n([\s\S]+?)```/)?.[1];
if (!html) {
- console.error("No HTML found in text", partial);
+ console.error("No HTML found in text", result);
return '';
}
return html
});
-const genHTMLView = lift(({ pending, partial }) => {
- if (!partial) {
+const previewHTML = lift(({ pending, partial }) => {
+ if (!partial || !pending) {
return "";
}
- if (pending) {
-    return `${partial.slice(-1000).replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000)}`;
- }
- const html = partial.match(/```html\n([\s\S]+?)```/)?.[1];
- if (!html) {
- console.error("No HTML found in text", partial);
- return `
error generating html...
${partial.replace(/>/g, "&gt;")}
`;
- }
- return html
+ return partial.slice(-200);
});
@@ -270,10 +270,8 @@ export const iframe = recipe<{
const suggestions = grabSuggestions(generateText(prepSuggestions({ src, prompt, schema })));
- // this html is a bit of a mess as changing src triggers suggestions and view (showing streaming)
- const { pending: pendingHTML, partial: partialHTML } = generateText(prepHTML({ prompt, schema, lastSrc }));
- src = grabHTML({ pending: pendingHTML, partial: partialHTML });
- const viewsrc = genHTMLView({ pending: pendingHTML, partial: partialHTML });
+ // FIXME(ja): this html is a bit of a mess as changing src triggers suggestions and view (showing streaming)
+ const { result, pending: pendingHTML, partial: partialHTML } = generateText(prepHTML({ prompt, schema, lastSrc }));
let firstSuggestion = getSuggestion({ suggestions, index: 0 });
let secondSuggestion = getSuggestion({ suggestions, index: 1 });
@@ -287,7 +285,8 @@ export const iframe = recipe<{
placeholder="title"
oncommon-input=${updateValue({ value: title })}
>
-
+
+
${previewHTML({ partial: partialHTML, pending: pendingHTML })}
View Data
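
previewHTML now returns the raw tail of the streamed partial while generation is pending. If that fragment ends up rendered as markup (the pre-patch code escaped angle brackets for exactly this reason), it needs escaping so half-finished tags render as text rather than being parsed. A small sketch of that idea; the escapeHtml helper and the pre wrapper are illustrative, not code from the patch.

```ts
// Illustrative streaming-preview helper: while the completion is pending,
// show only the last `chars` characters of the raw partial, escaped so any
// half-finished HTML renders as text instead of being parsed.
const escapeHtml = (s: string): string =>
  s.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");

const streamingPreview = (
  pending: boolean,
  partial?: string,
  chars = 200,
): string =>
  pending && partial ? `<pre>${escapeHtml(partial.slice(-chars))}</pre>` : "";

// e.g. streamingPreview(true, "<div class=\"card")
// => "<pre>&lt;div class=\"card</pre>"
```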
Date: Mon, 7 Oct 2024 18:38:45 -0400
Subject: [PATCH 16/19] luftbnb almost working again
---
.../src/recipes/luft-bnb-search.ts | 226 ++++++++----------
1 file changed, 101 insertions(+), 125 deletions(-)
diff --git a/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts b/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts
index 11d467030..65813eb2e 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/luft-bnb-search.ts
@@ -4,27 +4,52 @@ import {
lift,
handler,
str,
- generateData,
+ generateText,
UI,
NAME,
} from "@commontools/common-builder";
import { addSuggestion, description } from "../suggestions.js";
import { launch, ID } from "../data.js";
+import { z } from 'zod';
+import zodToJsonSchema from 'zod-to-json-schema';
-export interface LuftBnBPlace {
- // Schema for a place
- id: string;
- title: string;
- host: string;
- location: string;
- propertyType: "Apartment" | "House" | "Room";
- pricePerNight: number;
- numberOfGuests: number;
- latitude: number;
- longitude: number;
- rating: number;
- annotationUI: any;
-}
+const LuftBnBPlace = z.object({
+ id: z.string().describe('Unique identifier for the listing'),
+ title: z.string().describe('Title of the listing'),
+ host: z.string().describe('Host of the listing'),
+ location: z.string().describe('Street corner, Neighborhood and city of the listing'),
+ propertyType: z.enum(['Apartment', 'House', 'Room']),
+ pricePerNight: z.number().min(0),
+ numberOfGuests: z.number().int().min(1),
+ latitude: z.number(),
+ longitude: z.number(),
+ rating: z.number().min(0).max(5).describe('Average rating of the listing'),
+ annotationUI: z.string().describe('empty string - do not add anything here'),
+});
+
+type LuftBnBPlace = z.infer<typeof LuftBnBPlace>;
+
+const LuftBnBPlaces = z.array(LuftBnBPlace);
+
+const jsonSchema = JSON.stringify(zodToJsonSchema(LuftBnBPlaces), null, 2);
+
+const grabPlaces = lift<{ result: string }, LuftBnBPlace[]>(({ result }) => {
+ if (!result) {
+ return [];
+ }
+ const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
+ if (!jsonMatch) {
+ console.error("No JSON found in text:", result);
+ return [];
+ }
+ let rawData = JSON.parse(jsonMatch[1]);
+ let parsedData = z.array(LuftBnBPlace).safeParse(rawData);
+ if (!parsedData.success) {
+ console.error("Invalid JSON:", parsedData.error);
+ return [];
+ }
+ return parsedData.data;
+});
const copy = lift(({ value }: { value: string }) => value);
@@ -75,67 +100,9 @@ const makeBooking = handler<
});
});
-const buildQuery = lift(({ location }) => ({
- messages: [`generate 10 places for private home short-term rentals in ${location}`, '```json\n'],
- result: [],
- schema: {
- type: "array",
- items: {
- type: "object",
- properties: {
- id: {
- type: "string",
- description: "Unique identifier for the listing",
- },
- title: {
- type: "string",
- description: "Title of the listing",
- },
- host: {
- type: "string",
- description: "Host of the listing",
- },
- location: {
- type: "string",
- description: "Street corner, Neighborhood and city of the listing",
- },
- propertyType: {
- type: "string",
- enum: ["Apartment", "House", "Room"],
- },
- pricePerNight: {
- type: "number",
- minimum: 0,
- },
- numberOfGuests: {
- type: "integer",
- minimum: 1,
- },
- latitude: {
- type: "number",
- },
- longitude: {
- type: "number",
- },
- rating: {
- type: "number",
- minimum: 0,
- maximum: 5,
- description: "Average rating of the listing",
- },
- },
- required: [
- "id",
- "title",
- "host",
- "location",
- "propertyType",
- "pricePerNight",
- "numberOfGuests",
- "imageUrl",
- ],
- },
- },
+const buildQuery = lift(({ location, startDate, endDate }) => ({
+ messages: [`generate 10 places for private home short-term rentals in ${location} between ${startDate} and ${endDate}`, '```json\n['],
+ system: `Generate a list of places in json format\n\n${jsonSchema}`,
}));
export const luftBnBSearch = recipe<{
@@ -158,9 +125,7 @@ export const luftBnBSearch = recipe<{
const endDateUI = copy({ value: endDate });
const locationUI = copy({ value: location });
- const { result: places } = generateData(
- buildQuery({ location })
- );
+ const places = grabPlaces(generateText(buildQuery({ location, startDate, endDate })));
return {
[UI]: html`
@@ -186,18 +151,18 @@ export const luftBnBSearch = recipe<{
>Search
${places.map(
- (place) => html`
+ (place) => html`
${place.title}
@@ -212,9 +177,8 @@ export const luftBnBSearch = recipe<{
Book for $${place.pricePerNight} per night
${place.annotationUI}
-
- `
- )}
+ `
+ )}
`,
@@ -255,11 +219,9 @@ const computeBookingDatesFromEvent = lift(({ date }) => {
const describeFirstResult = lift(({ places, startDate, endDate }) => {
return places && places.length
- ? `${places[0].propertyType} ${startDate}-${endDate} in ${
- places[0].location
- }. ${"⭐".repeat(Math.round(places[0].rating))} (${places[0].rating}). $${
- places[0].pricePerNight
- } per night`
+ ? `${places[0].propertyType} ${startDate}-${endDate} in ${places[0].location
+ }. ${"⭐".repeat(Math.round(places[0].rating))} (${places[0].rating}). $${places[0].pricePerNight
+ } per night`
: "Searching...";
});
@@ -298,6 +260,19 @@ addSuggestion({
},
});
+
+const NearbyPlace = z.object({
+ id: z.string().describe('Unique identifier for the listing'),
+ name: z.string().describe('Name of the place'),
+ location: z.string().describe(`Street corner, Neighborhood and city`),
+ walkingDistance: z.number().describe('Walking distance in minutes'),
+});
+
+type NearbyPlace = z.infer<typeof NearbyPlace>;
+
+const NearbyPlaces = z.array(NearbyPlace);
+type NearbyPlaces = z.infer<typeof NearbyPlaces>;
+
const generateNearbyPlaceQuery = lift(({ routine, places }) => {
const locationType = routine.locations[0] ?? "coffee shop";
@@ -305,35 +280,37 @@ const generateNearbyPlaceQuery = lift(({ routine, places }) => {
location: place.location,
}));
- return {
- messages: [`generate ${initialData.length} ${locationType} with pun names`, '```json\n'],
- initialData,
- schema: {
- type: "array",
- items: {
- type: "object",
- properties: {
- id: {
- type: "string",
- description: "Unique identifier for the listing",
- },
- name: {
- type: "string",
- description: `Name of the ${locationType}`,
- },
- location: {
- type: "string",
- description:
- "Street corner, Neighborhood and city of the ${locationType}",
- },
- walkingDistance: {
- type: "number",
- description: "Walking distance in minutes",
- },
- },
- },
- },
+
+ const jsonSchema = JSON.stringify(zodToJsonSchema(NearbyPlaces), null, 2);
+
+ let r = {
+ messages: [`generate ${initialData.length} ${locationType} with pun names`,
+ '```json\n['],
+ system: `Generate a list of ${locationType} places in json format\n\n${jsonSchema}`,
+ stop: '```',
};
+ console.log(JSON.stringify(r, null, 2));
+ return r;
+});
+
+// FIXME(ja): validate that the recommendations work here...
+const grabNearbyPlaces = lift<{ result: string }, NearbyPlaces>(({ result }) => {
+ if (!result) {
+ return [];
+ }
+ const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
+ if (!jsonMatch) {
+ console.error("No JSON found in text:", result);
+ return [];
+ }
+
+ let rawData = JSON.parse(jsonMatch[1]);
+ let parsedData = z.array(NearbyPlace).safeParse(rawData);
+ if (!parsedData.success) {
+ console.error("Invalid JSON:", parsedData.error);
+ return [];
+ }
+ return parsedData.data;
});
// NOTE: This writes results into `places`
@@ -352,9 +329,8 @@ const nearbyPlacesForRoutine = recipe<{
routine: { locations: string[] };
places: LuftBnBPlace[];
}>("annotate places for routine", ({ routine, places }) => {
- const query = generateNearbyPlaceQuery({ routine, places });
- const { result: nearbyPlaces } = generateData(query);
+ const nearbyPlaces = grabNearbyPlaces(generateText(generateNearbyPlaceQuery({ routine, places })));
annotatePlacesWithNearbyPlaces({ nearbyPlaces, places });
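
This patch follows the same move as the earlier recipes: a single zod definition now supplies both the compile-time type (via z.infer) and the JSON schema text pasted into the system prompt. A cut-down sketch of that single-source-of-truth pattern; the Listing shape below is abbreviated for illustration, not the real LuftBnBPlace.

```ts
import { z } from "zod";
import zodToJsonSchema from "zod-to-json-schema";

// One definition drives both the static type and the schema the model sees.
const Listing = z.object({
  title: z.string().describe("Title of the listing"),
  pricePerNight: z.number().min(0),
});
type Listing = z.infer<typeof Listing>;

// Embedded into the system prompt as a JSON Schema for an array of listings.
const system = `Generate a list of places in json format

${JSON.stringify(zodToJsonSchema(z.array(Listing)), null, 2)}`;

// The same definition type-checks local values and validates parsed results
// with z.array(Listing).safeParse(...).
const sample: Listing = { title: "Sunny loft", pricePerNight: 120 };
console.log(system.length, sample.title);
```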
From 43ef2fc6197ddc558af757720494bf22c18f7f96 Mon Sep 17 00:00:00 2001
From: Jesse Andrews
Date: Tue, 8 Oct 2024 12:27:17 -0400
Subject: [PATCH 17/19] another pass through all recipes
generateText -> llm
---
.../packages/common-builder/src/built-in.ts | 19 ++--
.../packages/common-builder/src/index.ts | 2 +-
.../common-runner/src/builtins/index.ts | 4 +-
.../src/builtins/{generate-text.ts => llm.ts} | 36 ++++---
.../src/components/common-suggestions.ts | 2 +
.../src/recipes/annotation.ts | 50 +++++++---
.../src/recipes/generator.ts | 19 ++--
.../src/recipes/iframe.ts | 40 ++++----
.../src/recipes/local-search.ts | 95 +++++++++++--------
.../src/recipes/luft-bnb-search.ts | 43 ++++-----
.../src/recipes/playlist.ts | 76 ++++++++-------
.../src/recipes/prompts.ts | 4 +-
.../lookslike-high-level/src/recipes/wiki.ts | 90 +++++++++++-------
13 files changed, 271 insertions(+), 209 deletions(-)
rename typescript/packages/common-runner/src/builtins/{generate-text.ts => llm.ts} (78%)
diff --git a/typescript/packages/common-builder/src/built-in.ts b/typescript/packages/common-builder/src/built-in.ts
index e5baf583d..e2f81964f 100644
--- a/typescript/packages/common-builder/src/built-in.ts
+++ b/typescript/packages/common-builder/src/built-in.ts
@@ -1,19 +1,20 @@
import { lift, createNodeFactory } from "./module.js";
import { Value, NodeFactory, CellProxy } from "./types.js";
-export function generateText(
+export function llm(
params: Value<{
- messages: string[];
+ messages?: string[];
+ prompt?: string;
system?: string;
stop?: string;
max_tokens?: number;
}>
-): CellProxy<{ pending: boolean; result: string; partial: string; error: any }> {
- generateTextFactory ||= createNodeFactory({
+): CellProxy<{ pending: boolean; result?: string; partial?: string; error: any }> {
+ llmFactory ||= createNodeFactory({
type: "builtin",
- implementation: "generateText",
+ implementation: "llm",
});
- return generateTextFactory(params);
+ return llmFactory(params);
}
export function fetchData(
@@ -52,10 +53,10 @@ export function ifElse(
let ifElseFactory: NodeFactory<[any, any, any], any> | undefined = undefined;
-let generateTextFactory:
+let llmFactory:
| NodeFactory<
- { messages: string[]; system?: string; stop?: string; max_tokens?: number },
- { pending: boolean; result: any; partial: any; error: any }
+ { messages?: string[]; prompt?: string; system?: string; stop?: string; max_tokens?: number },
+ { pending: boolean; result?: string; partial?: string; error: any }
>
| undefined = undefined;
diff --git a/typescript/packages/common-builder/src/index.ts b/typescript/packages/common-builder/src/index.ts
index beddf8d1c..b7ccbf73b 100644
--- a/typescript/packages/common-builder/src/index.ts
+++ b/typescript/packages/common-builder/src/index.ts
@@ -6,7 +6,7 @@ export {
isolated,
} from "./module.js";
export { recipe } from "./recipe.js";
-export { streamData, fetchData, generateText, ifElse, str } from "./built-in.js";
+export { streamData, fetchData, llm, ifElse, str } from "./built-in.js";
export {
ID,
TYPE,
diff --git a/typescript/packages/common-runner/src/builtins/index.ts b/typescript/packages/common-runner/src/builtins/index.ts
index 781f276a7..d102aabf7 100644
--- a/typescript/packages/common-runner/src/builtins/index.ts
+++ b/typescript/packages/common-runner/src/builtins/index.ts
@@ -3,7 +3,7 @@ import { type CellImpl } from "../cell.js";
import { map } from "./map.js";
import { fetchData } from "./fetch-data.js";
import { streamData } from "./stream-data.js";
-import { generateText } from "./generate-text.js";
+import { llm } from "./llm.js";
import { ifElse } from "./if-else.js";
export const builtins: {
[key: string]: (recipeCell: CellImpl, node: Node) => void;
@@ -11,6 +11,6 @@ export const builtins: {
map,
fetchData,
streamData,
- generateText,
+ llm,
ifElse,
};
diff --git a/typescript/packages/common-runner/src/builtins/generate-text.ts b/typescript/packages/common-runner/src/builtins/llm.ts
similarity index 78%
rename from typescript/packages/common-runner/src/builtins/generate-text.ts
rename to typescript/packages/common-runner/src/builtins/llm.ts
index 1e1045592..a5502dcf3 100644
--- a/typescript/packages/common-runner/src/builtins/generate-text.ts
+++ b/typescript/packages/common-runner/src/builtins/llm.ts
@@ -23,11 +23,11 @@ import { makeClient, SimpleMessage, SimpleContent } from "../llm-client.js";
* @param stop - A cell to store (optional) stop sequence.
* @param max_tokens - A cell to store the maximum number of tokens to generate.
*
- * @returns { pending: boolean, result: any, partial: any } - As individual
+ * @returns { pending: boolean, result?: string, partial?: string } - As individual
* cells, representing `pending` state, final `result` and incrementally
* updating `partial` result.
*/
-export function generateText(
+export function llm(
recipeCell: CellImpl,
{ inputs, outputs }: Node
) {
@@ -41,27 +41,23 @@ export function generateText(
const inputsCell = cell(inputBindings);
const pending = cell(false);
- const fullResult = cell(undefined);
- const partialResult = cell(undefined);
+ const result = cell(undefined);
+ const partial = cell(undefined);
- const resultCell = cell({
- pending,
- result: fullResult,
- partial: partialResult,
- });
+ const outputCell = cell({ pending, result, partial });
const outputBindings = mapBindingsToCell(outputs, recipeCell) as any[];
- sendValueToBinding(recipeCell, outputBindings, resultCell);
+ sendValueToBinding(recipeCell, outputBindings, outputCell);
let currentRun = 0;
const startGeneration: Action = (log: ReactivityLog) => {
const thisRun = ++currentRun;
- const { system, messages, prompt, stop, max_tokens } = inputsCell.getAsProxy([], log);
+ const { system, messages, prompt, stop, max_tokens } = inputsCell.getAsProxy([], log) ?? {};
- fullResult.setAtPath([], undefined, log);
- partialResult.setAtPath([], undefined, log);
+ result.setAtPath([], undefined, log);
+ partial.setAtPath([], undefined, log);
if (((prompt === undefined || prompt.length === 0) && (messages === undefined || messages.length === 0)) || system === undefined) {
pending.setAtPath([], false, log);
@@ -69,9 +65,9 @@ export function generateText(
}
pending.setAtPath([], true, log);
- const updatePartial = (t: string) => {
+ const updatePartial = (text: string) => {
if (thisRun != currentRun) return;
- partialResult.setAtPath([], t, log);
+ partial.setAtPath([], text, log);
}
let resultPromise = makeClient().sendRequest({
@@ -83,20 +79,20 @@ export function generateText(
}, updatePartial)
resultPromise
- .then((result) => {
+ .then((text) => {
if (thisRun !== currentRun) return;
// normalizeToCells(result, undefined, log);
pending.setAtPath([], false, log);
- fullResult.setAtPath([], result, log);
- partialResult.setAtPath([], result, log);
+ result.setAtPath([], text, log);
+ partial.setAtPath([], text, log);
}).catch((error) => {
if (thisRun !== currentRun) return;
console.error("Error generating data", error);
pending.setAtPath([], false, log);
- fullResult.setAtPath([], undefined, log);
- partialResult.setAtPath([], undefined, log);
+ result.setAtPath([], undefined, log);
+ partial.setAtPath([], undefined, log);
});
};
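
The run counter in the renamed llm builtin is what keeps a slow completion from overwriting the output cells after the inputs have changed: each invocation captures thisRun and bails out if a newer run has started before its promise settles. The same guard in isolation, where send and setState are stand-ins for makeClient().sendRequest and the pending/result cell writes:

```ts
// Stale-response guard, extracted for illustration only.
let currentRun = 0;

async function generate(
  send: (prompt: string) => Promise<string>,
  prompt: string,
  setState: (state: { pending: boolean; result?: string }) => void,
): Promise<void> {
  const thisRun = ++currentRun; // inputs changed: this is now the live run
  setState({ pending: true });
  try {
    const text = await send(prompt);
    if (thisRun !== currentRun) return; // a newer run superseded this one
    setState({ pending: false, result: text });
  } catch (error) {
    if (thisRun !== currentRun) return;
    console.error("Error generating data", error);
    setState({ pending: false, result: undefined });
  }
}
```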
diff --git a/typescript/packages/common-ui/src/components/common-suggestions.ts b/typescript/packages/common-ui/src/components/common-suggestions.ts
index 88918ac31..6afc4ec55 100644
--- a/typescript/packages/common-ui/src/components/common-suggestions.ts
+++ b/typescript/packages/common-ui/src/components/common-suggestions.ts
@@ -57,6 +57,8 @@ export class CommonSuggestionsElement extends LitElement {
}
};
+ // FIXME(ja): Cannot read properties of undefined (reading 'slice')
+  // broken on main - doesn't seem to be a regression of the llm changes
const suggestions = this.suggestions.slice(0, this.limit);
return html`
diff --git a/typescript/packages/lookslike-high-level/src/recipes/annotation.ts b/typescript/packages/lookslike-high-level/src/recipes/annotation.ts
index 789d62102..f649d308e 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/annotation.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/annotation.ts
@@ -9,7 +9,7 @@ import {
NAME,
TYPE,
ifElse,
- generateData,
+ llm,
} from "@commontools/common-builder";
import { type Charm, openCharm } from "../data.js";
import {
@@ -18,9 +18,17 @@ import {
getCellReferenceOrValue,
} from "@commontools/common-runner";
import { suggestions } from "../suggestions.js";
+import { z } from "zod";
const MINIMUM_CONFIDENCE = -1.0;
+const Suggestion = z.object({
+ index: z.number(),
+ confidence: z.number(),
+ reason: z.string(),
+});
+type Suggestion = z.infer<typeof Suggestion>;
+
type CharmInfo = { id: number; name: string; type: string };
// Lifted functions at module scope
@@ -36,11 +44,13 @@ const getCharmInfo = lift(({ charms }) => {
return charmInfo;
});
-const buildMessages = lift(
+const buildQuery = lift(
({ query, charmInfo }) => {
- return [
- `Given the following user query and list of data charms, return the indices of the charms that are most relevant to the query.
+ return {
+ system: "You are an assistant that helps match user queries to relevant data charms based on their names and types.",
+ messages: [
+ `Given the following user query and list of data charms, return the indices of the charms that are most relevant to the query.
Consider both the names and types of the charms when making your selection.
Think broadly, e.g. a stay in a hotel could match a charm called "morning routine", as the user would want to pick a hotel that supports their morning routine.
@@ -61,11 +71,32 @@ Respond with only JSON array of suggestions, e.g.
{ index: 5, chosen: "suzy collab", reason: "could this be related to Susan? she appears in several project related lists", confidence: 0.33 }
]
\`\`\`
-`, '```json\n['];
+`, '```json\n['
+ ],
+ stop: '```',
+ };
});
+const grabJson = lift<{ result?: string }, Suggestion[]>(({ result }) => {
+ if (!result) {
+ return [];
+ }
+ const jsonMatch = result.match(/```json\n([\s\S]+?)```/);
+ if (!jsonMatch) {
+ console.error("No JSON found in text:", result);
+ return [];
+ }
+ let rawData = JSON.parse(jsonMatch[1]);
+ let parsedData = z.array(Suggestion).safeParse(rawData);
+ if (!parsedData.success) {
+ console.error("Invalid JSON:", parsedData.error);
+ return [];
+ }
+ return parsedData.data;
+});
+
const filterMatchingCharms = lift<{
- matchedIndices: { index: number; confidence: number }[];
+ matchedIndices: Suggestion[];
charmInfo: CharmInfo[];
}>(
({ matchedIndices, charmInfo }) =>
@@ -168,12 +199,7 @@ export const annotation = recipe<{
charms: Charm[];
}>("annotation", ({ query, target, data, charms }) => {
const charmInfo = getCharmInfo({ charms });
- const { result: matchedIndices } = generateData<
- { index: number; confidence: number }[]
- >({
- messages: buildMessages({ query, charmInfo }),
- system: "You are an assistant that helps match user queries to relevant data charms based on their names and types.",
- });
+ const matchedIndices = grabJson(llm(buildQuery({ query, charmInfo })));
const matchingCharms = filterMatchingCharms({ matchedIndices, charmInfo });
const suggestion = findSuggestion({ matchingCharms, data });
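
By this point the same extract-and-validate body (match the json fence, JSON.parse, zod safeParse, fall back to an empty result) has appeared in grabSuggestions, grabPrompts, grabPlaces, grabNearbyPlaces, and grabJson. A possible shared helper is sketched below rather than taken from the patches; the parseFencedJson name is invented.

```ts
import { z } from "zod";

// Illustrative shared helper for the recurring grab* bodies: pull the first
// json-fenced block out of a completion and validate it with a zod schema.
export function parseFencedJson<T>(
  text: string | undefined,
  schema: z.ZodType<T>,
  fallback: T,
): T {
  if (!text) return fallback;
  const match = text.match(/```json\n([\s\S]+?)```/);
  if (!match) {
    console.error("No JSON found in text:", text);
    return fallback;
  }
  try {
    const parsed = schema.safeParse(JSON.parse(match[1]));
    if (!parsed.success) {
      console.error("Invalid JSON:", parsed.error);
      return fallback;
    }
    return parsed.data;
  } catch (error) {
    console.error("Malformed JSON:", error);
    return fallback;
  }
}

// e.g. grabSuggestions could then reduce to:
//   lift(({ result }) => parseFencedJson(result, Suggestion.array(), []))
```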
diff --git a/typescript/packages/lookslike-high-level/src/recipes/generator.ts b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
index 87b7e0c81..613e9ce59 100644
--- a/typescript/packages/lookslike-high-level/src/recipes/generator.ts
+++ b/typescript/packages/lookslike-high-level/src/recipes/generator.ts
@@ -1,5 +1,5 @@
import { html } from "@commontools/common-html";
-import { recipe, NAME, UI, handler, lift, generateText } from "@commontools/common-builder";
+import { recipe, NAME, UI, handler, lift, llm } from "@commontools/common-builder";
import { z } from 'zod';
import zodToJsonSchema from 'zod-to-json-schema';
@@ -17,7 +17,7 @@ const prepText = lift(({ prompt }) => {
}
return {};
});
-const grabText = lift(({ partial }) => { return partial || '' })
+const grabText = lift<{ partial?: string }, string>(({ partial }) => { return partial || '' })
const prepHTML = lift(({ prompt }) => {
@@ -30,13 +30,13 @@ const prepHTML = lift(({ prompt }) => {
}
return {};
});
-const grabHtml = lift(({ partial, pending }) => {
+const grabHtml = lift<{ partial?: string; pending?: boolean }, string>(({ partial, pending }) => {
if (!partial) {
return ""
}
if (pending) {
-      return `${partial.slice(-1000).replace(/</g, "&lt;").replace(/>/g, "&gt;").slice(-1000)}`;
+      return `${partial.split('\n').slice(-5).join('\n').replace(/</g, "&lt;").replace(/>/g, "&gt;")}`;
}
const html = partial.match(/```html\n([\s\S]+?)```/)?.[1];
@@ -69,7 +69,7 @@ const prepJSON = lift(({ prompt }) => {
}
return {};
});
-const grabJson = lift<{ result: string }, Character | undefined>(({ result }) => {
+const grabJson = lift<{ result?: string }, Character | undefined>(({ result }) => {
if (!result) {
return;
}
@@ -93,14 +93,13 @@ export const generator = recipe<{ jsonprompt: string; htmlprompt: string; textpr
({ jsonprompt, htmlprompt, textprompt, data }) => {
textprompt.setDefault("2 sentence story");
- const maybeText = grabText(generateText(prepText({ prompt: textprompt })))
+ const maybeText = grabText(llm(prepText({ prompt: textprompt })))
jsonprompt.setDefault("pet");
- data = grabJson(generateText(prepJSON({ prompt: jsonprompt })));
- const maybeJSON = jsonify({ data });
+ data = grabJson(llm(prepJSON({ prompt: jsonprompt })));
htmlprompt.setDefault("simple html about recipes");
- const maybeHTML = grabHtml(generateText(prepHTML({ prompt: htmlprompt })));
+ const maybeHTML = grabHtml(llm(prepHTML({ prompt: htmlprompt })));
return {
[NAME]: 'data generator',
@@ -119,7 +118,7 @@ export const generator = recipe<{ jsonprompt: string; htmlprompt: string; textpr
placeholder="Request to LLM"
oncommon-input=${updateValue({ value: jsonprompt })}
>
-
${maybeJSON}
+
${jsonify({ data })}
HTML
{
let instructions = `Given the current prompt: "${prompt}"
-Given the following schema:
+Suggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple these add or change a single feature.
+
+Do not ever exceed a single sentence. Prefer terse, suggestions that take one step.`;
+
+ return {
+ messages: [instructions, '```json\n{"suggestions":['],
+ system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential.
+
+Using the following schema:
${JSON.stringify(schema, null, 2)}
@@ -97,14 +104,8 @@ And the current HTML:
${src}
-Suggest 3 prompts to enhance, refine or branch off into a new UI. Keep it simple these add or change a single feature.
-
-Do not ever exceed a single sentence. Prefer terse, suggestions that take one step.`;
+Respond in a json block.
- return {
- messages: [instructions, '```json\n{"suggestions":['],
- system: `Suggest extensions to the UI either as modifications or forks off into new interfaces. Avoid bloat, focus on the user experience and creative potential. Respond in a json block.
-
\`\`\`json
{
"suggestions": [
@@ -120,7 +121,7 @@ Do not ever exceed a single sentence. Prefer terse, suggestions that take one st
});
-const grabSuggestions = lift<{ result: string }, Suggestion[]>(({ result }) => {
+const grabSuggestions = lift<{ result?: string }, Suggestion[]>(({ result }) => {
if (!result) {
return [];
}
@@ -223,26 +224,25 @@ const prepHTML = lift(({ prompt, schema, lastSrc }) => {
}
});
-const grabHTML = lift<{ result: string }, string | undefined>(({ result }) => {
+const grabHTML = lift<{ result?: string }, string | undefined>(({ result }) => {
if (!result) {
- return '';
+ return;
}
const html = result.match(/```html\n([\s\S]+?)```/)?.[1];
if (!html) {
console.error("No HTML found in text", result);
- return '';
+ return;
}
return html
});
-const previewHTML = lift(({ pending, partial }) => {
+const tail = lift<{ pending: boolean, partial?: string, lines: number }, string>(({ pending, partial, lines }) => {
if (!partial || !pending) {
return "";
}
- return partial.slice(-200);
+ return partial.split('\n').slice(-lines).join('\n');
});
-
export const iframe = recipe<{
title: string;
prompt: string;
@@ -268,10 +268,10 @@ export const iframe = recipe<{
// mode: "json",
// });
- const suggestions = grabSuggestions(generateText(prepSuggestions({ src, prompt, schema })));
+ const suggestions = grabSuggestions(llm(prepSuggestions({ src, prompt, schema })));
// FIXME(ja): this html is a bit of a mess as changing src triggers suggestions and view (showing streaming)
- const { result, pending: pendingHTML, partial: partialHTML } = generateText(prepHTML({ prompt, schema, lastSrc }));
+ const { result, pending: pendingHTML, partial: partialHTML } = llm(prepHTML({ prompt, schema, lastSrc }));
let firstSuggestion = getSuggestion({ suggestions, index: 0 });
let secondSuggestion = getSuggestion({ suggestions, index: 1 });
@@ -286,7 +286,7 @@ export const iframe = recipe<{
oncommon-input=${updateValue({ value: title })}
>
-