
Commit e2486eb

feat: support human in the loop for TS (#686)
* feat: support human in the loop for TS
* add example for custom workflow
* fix: need to request humanResponseEvent to save missing step to snapshot
* refactor: human response data should be any
* refactor runWorkflow function to support resume stream
* refactor: hitl
* fix: workflow
* add summary event
* send tool event
* use requestId from Vercel
* update chat route.ts
* fix copy utils/*
* refactor: workflow and stream
* Create eight-moons-perform.md
* update typo
* make schema simple
* fix typo
* use messages in startAgentEvent
* save to snapshots folder
* fix lint
* feat: workflowBaseEvent
* include response event in input event
* simplify type
* update readme
* update document
* fix typecheck
* bump: "@llamaindex/workflow": "~1.1.8"
* remove any
* use fixed tsx version to fix e2e
* fix wrong copy
* add cli hitl examples as a use case for both Python and TS
* update changeset to release create-llama also
* fix e2e
* fix e2e
* hitl frontend chat
* try disable hitl test
1 parent 66b81e5 commit e2486eb
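
For orientation: the human-in-the-loop round trip in this commit hinges on a chat annotation of type `human_response` that the frontend appends when the user approves or rejects a proposed CLI command. The sketch below is not part of the commit; it only mirrors the payload shape used by the CLIHumanInput component added later in this diff, and the `HumanResponseAnnotation` type alias is an illustrative name.

```ts
// Illustrative only: the annotation shape mirrors what CLIHumanInput appends
// via useChatUI().append(); the type alias name is an assumption.
type HumanResponseAnnotation = {
  type: "human_response";
  data: {
    execute: boolean; // true = run the proposed command, false = cancel
    command?: string; // the (possibly edited) CLI command
  };
};

// Example payload sent back to the workflow when the user clicks "Yes".
const approval: HumanResponseAnnotation = {
  type: "human_response",
  data: { execute: true, command: "ls -la" },
};

console.log(JSON.stringify(approval, null, 2));
```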

File tree: 41 files changed (+1653 −91 lines)


.changeset/eight-moons-perform.md

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+---
+"@llamaindex/server": patch
+"create-llama": patch
+---
+
+feat: support human in the loop for TS

packages/create-llama/e2e/python/resolve_dependencies.spec.ts

Lines changed: 1 addition & 0 deletions

@@ -20,6 +20,7 @@ const useCases: TemplateUseCase[] = [
   "financial_report",
   "code_generator",
   "document_generator",
+  "hitl",
 ];
 const dataSource: string = process.env.DATASOURCE
   ? process.env.DATASOURCE

packages/create-llama/e2e/shared/llamaindexserver_template.spec.ts

Lines changed: 1 addition & 0 deletions

@@ -27,6 +27,7 @@ const templateUseCases = [
   "financial_report",
   "deep_research",
   "code_generator",
+  // "hitl",
 ];
 const ejectDir = "next";
packages/create-llama/e2e/typescript/resolve_dependencies.spec.ts

Lines changed: 3 additions & 1 deletion

@@ -3,6 +3,7 @@ import { exec } from "child_process";
 import fs from "fs";
 import path from "path";
 import util from "util";
+import { NO_DATA_USE_CASES } from "../../helpers/constant";
 import {
   TemplateFramework,
   TemplateType,
@@ -25,6 +26,7 @@ const useCases: TemplateUseCase[] = [
   "financial_report",
   "code_generator",
   "document_generator",
+  "hitl",
 ];
 const dataSource: string = process.env.DATASOURCE
   ? process.env.DATASOURCE
@@ -83,7 +85,7 @@ test.describe("Test resolve TS dependencies", () => {
     });
   });
   // Skipping llamacloud for the use case doesn't use index.
-  if (useCase !== "code_generator" && useCase !== "document_generator") {
+  if (!useCase || !NO_DATA_USE_CASES.includes(useCase)) {
     test(`llamaParse - ${optionDescription}`, async () => {
       await runTest({
         templateType: templateType,
packages/create-llama/helpers/constant.ts

Lines changed: 9 additions & 0 deletions

@@ -1,6 +1,15 @@
+import { TemplateUseCase } from "./types";
+
 export const COMMUNITY_OWNER = "run-llama";
 export const COMMUNITY_REPO = "create_llama_projects";
 export const LLAMA_PACK_OWNER = "run-llama";
 export const LLAMA_PACK_REPO = "llama_index";
 export const LLAMA_PACK_FOLDER = "llama-index-packs";
 export const LLAMA_PACK_FOLDER_PATH = `${LLAMA_PACK_OWNER}/${LLAMA_PACK_REPO}/main/${LLAMA_PACK_FOLDER}`;
+
+// these use cases don't have data folder, so no need to run generate and no need to getIndex
+export const NO_DATA_USE_CASES: TemplateUseCase[] = [
+  "code_generator",
+  "document_generator",
+  "hitl",
+];
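
As a quick illustration of how this constant is consumed by the helpers below (for example `generateContextData` in helpers/index.ts), here is a minimal standalone sketch. The `shouldGenerateContextData` wrapper and the narrowed `TemplateUseCase` union are hypothetical; the guard expression itself is taken verbatim from the diffs in this commit.

```ts
// Minimal sketch; the helper name and the trimmed union are illustrative,
// the guard expression matches the commit's diffs.
type TemplateUseCase =
  | "agentic_rag"
  | "code_generator"
  | "document_generator"
  | "hitl";

const NO_DATA_USE_CASES: TemplateUseCase[] = [
  "code_generator",
  "document_generator",
  "hitl",
];

// These use cases ship no data folder, so skip `run generate` / getIndex.
function shouldGenerateContextData(useCase?: TemplateUseCase): boolean {
  return !useCase || !NO_DATA_USE_CASES.includes(useCase);
}

console.log(shouldGenerateContextData("hitl")); // false
console.log(shouldGenerateContextData("agentic_rag")); // true
```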

packages/create-llama/helpers/index.ts

Lines changed: 3 additions & 1 deletion

@@ -4,6 +4,7 @@ import path from "path";
 import picocolors, { cyan } from "picocolors";

 import fsExtra from "fs-extra";
+import { NO_DATA_USE_CASES } from "./constant";
 import { writeLoadersConfig } from "./datasources";
 import { createBackendEnvFile, createFrontendEnvFile } from "./env-variables";
 import { PackageManager } from "./get-pkg-manager";
@@ -98,8 +99,9 @@ async function generateContextData(
     }
   } else {
     console.log(`Running ${runGenerate} to generate the context data.`);
+
     const shouldRunGenerate =
-      useCase !== "code_generator" && useCase !== "document_generator"; // Artifact use case doesn't use index.
+      !useCase || !NO_DATA_USE_CASES.includes(useCase);

     if (shouldRunGenerate) {
       await callPackageManager(packageManager, true, ["run", "generate"]);

packages/create-llama/helpers/types.ts

Lines changed: 2 additions & 1 deletion

@@ -59,7 +59,8 @@ export type TemplateUseCase =
   | "contract_review"
   | "agentic_rag"
   | "code_generator"
-  | "document_generator";
+  | "document_generator"
+  | "hitl";
 // Config for both file and folder
 export type FileSourceConfig =
   | {

packages/create-llama/helpers/typescript.ts

Lines changed: 2 additions & 1 deletion

@@ -4,6 +4,7 @@ import path from "path";
 import { bold, cyan, red, yellow } from "picocolors";
 import { assetRelocator, copy } from "../helpers/copy";
 import { callPackageManager } from "../helpers/install";
+import { NO_DATA_USE_CASES } from "./constant";
 import { templatesDir } from "./dir";
 import { PackageManager } from "./get-pkg-manager";
 import { InstallTemplateArgs, ModelProvider, TemplateVectorDB } from "./types";
@@ -83,7 +84,7 @@ const installLlamaIndexServerTemplate = async ({
   }

   // Simplify use case code
-  if (useCase === "code_generator" || useCase === "document_generator") {
+  if (useCase && NO_DATA_USE_CASES.includes(useCase)) {
     // Artifact use case doesn't use index.
     // We don't need data.ts, generate.ts
     await fs.rm(path.join(root, "src", "app", "data.ts"));

packages/create-llama/questions/simple.ts

Lines changed: 17 additions & 2 deletions

@@ -1,4 +1,5 @@
 import prompts from "prompts";
+import { NO_DATA_USE_CASES } from "../helpers/constant";
 import { EXAMPLE_10K_SEC_FILES, EXAMPLE_FILE } from "../helpers/datasources";
 import { askModelConfig } from "../helpers/providers";
 import { getTools } from "../helpers/tools";
@@ -11,7 +12,8 @@ type AppType =
   | "financial_report"
   | "deep_research"
   | "code_generator"
-  | "document_generator";
+  | "document_generator"
+  | "hitl";

 type SimpleAnswers = {
   appType: AppType;
@@ -57,6 +59,12 @@ export const askSimpleQuestions = async (
           value: "document_generator",
           description: "Build a OpenAI canvas-styled document generator.",
         },
+        {
+          title: "Human in the Loop",
+          value: "hitl",
+          description:
+            "Build a CLI command workflow that is reviewed by a human before execution",
+        },
       ],
     },
     questionHandlers,
@@ -81,7 +89,8 @@ export const askSimpleQuestions = async (
     );
     language = newLanguage;

-    if (appType !== "code_generator" && appType !== "document_generator") {
+    const shouldAskLlamaCloud = !NO_DATA_USE_CASES.includes(appType);
+    if (shouldAskLlamaCloud) {
       const { useLlamaCloud: newUseLlamaCloud } = await prompts(
         {
           type: "toggle",
@@ -170,6 +179,12 @@ const convertAnswers = async (
       tools: [],
       modelConfig: MODEL_GPT41,
     },
+    hitl: {
+      template: "llamaindexserver",
+      dataSources: [],
+      tools: [],
+      modelConfig: MODEL_GPT41,
+    },
   };

   const results = lookup[answers.appType];
Lines changed: 95 additions & 0 deletions

@@ -0,0 +1,95 @@
+import { Button } from "@/components/ui/button";
+import { Card, CardContent, CardFooter } from "@/components/ui/card";
+import { JSONValue, useChatUI } from "@llamaindex/chat-ui";
+import React, { FC, useState } from "react";
+import { z } from "zod";
+
+// This schema is equivalent to the CLICommand model defined in events.py
+const CLIInputEventSchema = z.object({
+  command: z.string(),
+});
+type CLIInputEvent = z.infer<typeof CLIInputEventSchema>;
+
+const CLIHumanInput: FC<{
+  events: JSONValue[];
+}> = ({ events }) => {
+  const inputEvent = (events || [])
+    .map((ev) => {
+      const parseResult = CLIInputEventSchema.safeParse(ev);
+      return parseResult.success ? parseResult.data : null;
+    })
+    .filter((ev): ev is CLIInputEvent => ev !== null)
+    .at(-1);
+
+  const { append } = useChatUI();
+  const [confirmedValue, setConfirmedValue] = useState<boolean | null>(null);
+  const [editableCommand, setEditableCommand] = useState<string | undefined>(
+    inputEvent?.command,
+  );
+
+  // Update editableCommand if inputEvent changes (e.g. new event comes in)
+  React.useEffect(() => {
+    setEditableCommand(inputEvent?.command);
+  }, [inputEvent?.command]);
+
+  const handleConfirm = () => {
+    append({
+      content: "Yes",
+      role: "user",
+      annotations: [
+        {
+          type: "human_response",
+          data: {
+            execute: true,
+            command: editableCommand, // Use editable command
+          },
+        },
+      ],
+    });
+    setConfirmedValue(true);
+  };
+
+  const handleCancel = () => {
+    append({
+      content: "No",
+      role: "user",
+      annotations: [
+        {
+          type: "human_response",
+          data: {
+            execute: false,
+            command: inputEvent?.command,
+          },
+        },
+      ],
+    });
+    setConfirmedValue(false);
+  };
+
+  return (
+    <Card className="my-4">
+      <CardContent className="pt-6">
+        <p className="text-sm text-gray-700">
+          Do you want to execute the following command?
+        </p>
+        <input
+          disabled
+          type="text"
+          value={editableCommand || ""}
+          onChange={(e) => setEditableCommand(e.target.value)}
+          className="my-2 w-full overflow-x-auto rounded border border-gray-300 bg-gray-100 p-3 font-mono text-xs text-gray-800"
+        />
+      </CardContent>
+      {confirmedValue === null ? (
+        <CardFooter className="flex justify-end gap-2">
+          <>
+            <Button onClick={handleConfirm}>Yes</Button>
+            <Button onClick={handleCancel}>No</Button>
+          </>
+        </CardFooter>
+      ) : null}
+    </Card>
+  );
+};
+
+export default CLIHumanInput;
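
To see the event-selection logic in isolation, here is a framework-free sketch of how the component picks the CLI input event out of the annotation stream: validate each candidate with the zod schema and keep the most recent match. The standalone `latestCliInputEvent` function name is hypothetical; the parsing logic mirrors the `inputEvent` computation in the component above.

```ts
import { z } from "zod";

const CLIInputEventSchema = z.object({ command: z.string() });
type CLIInputEvent = z.infer<typeof CLIInputEventSchema>;

// Keep only annotations that match the schema and take the latest one,
// mirroring the component's inputEvent computation.
function latestCliInputEvent(events: unknown[]): CLIInputEvent | undefined {
  return events
    .map((ev) => {
      const parsed = CLIInputEventSchema.safeParse(ev);
      return parsed.success ? parsed.data : null;
    })
    .filter((ev): ev is CLIInputEvent => ev !== null)
    .at(-1);
}

console.log(latestCliInputEvent([{ foo: 1 }, { command: "ls -la" }]));
// -> { command: "ls -la" }
```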
