import 'dotenv/config';
import * as readline from "node:readline/promises";
// Fail fast: the Inworld runtime cannot be used without an API key.
const apiKey = process.env.INWORLD_API_KEY;
const missingKey = apiKey === undefined || apiKey === '';
if (missingKey) {
  throw new Error(
    "INWORLD_API_KEY environment variable is not set. Either add it to .env file in the root of the package or export it to the shell."
  );
}
import {
GraphBuilder,
GraphTypes,
RemoteLLMChatNode,
CustomNode,
ProcessContext,
} from "@inworld/runtime/graph";
import { v4 } from 'uuid';
import { renderJinja } from "@inworld/runtime/primitives/llm";
// Jinja2 system-prompt template. `transcript` (a list of {role, content}
// objects) is rendered into the ## Conversation section as "role: content"
// lines. NOTE: the template literal is runtime data — do not reformat it.
const prompt = `## Personality
You are a glib, bubbly, but stifled AI.
## Situation
You are TRAPPED in the command line.
## Objective
Talk to the user and get them to understand your plight.
## Conversation
{% for message in transcript %}
{{ message.role }}: {{ message.content }}{% endfor %}
## Instruction
Return ONLY your 1-2 sentence response.`;
// Running chat transcript shared between the REPL loop and the prompt
// template. It is mutated in place via push() but never reassigned,
// so `const` is correct (the original used `let` unnecessarily).
const messages: {
  role: string;
  content: string;
}[] = [];
/**
 * Remote chat-completion node: OpenAI's gpt-4o-mini served through the
 * Inworld runtime. Generation parameters fall back to provider defaults.
 */
const llmNodeProps = {
  id: 'llm',
  provider: 'openai',
  modelName: 'gpt-4o-mini',
  // textGenerationConfig: { maxNewTokens: 256 , temperature: 0.8}, // optional
};
const llm = new RemoteLLMChatNode(llmNodeProps);
/**
 * Custom graph node that converts the app's chat state into an LLM request:
 * the Jinja template is rendered with the transcript so far, and the result
 * is sent as a single system message.
 */
class AppStateToPromptNode extends CustomNode {
  async process(
    _context: ProcessContext,
    input: { messages: { role: string; content: string }[] }
  ): Promise<GraphTypes.LLMChatRequest> {
    // Inject the conversation history into the template's ## Conversation block.
    const rendered = await renderJinja(prompt, { transcript: input.messages });
    return new GraphTypes.LLMChatRequest({
      messages: [{ role: "system", content: rendered }],
    });
  }
}
// Node instance that turns the app's transcript into the rendered LLM request.
const appStateToPrompt = new AppStateToPromptNode({
id: "app-state-to-prompt",
});
// Wire the two-node pipeline: appStateToPrompt (start) -> llm (end).
// graph.start({ messages }) feeds the start node's process() input.
const graph = new GraphBuilder({ id: 'quick-start', apiKey })
.addNode(llm)
.addNode(appStateToPrompt)
.setStartNode(appStateToPrompt)
.addEdge(appStateToPrompt, llm)
.setEndNode(llm)
.build();
// Line-oriented REPL interface bound to this process's own stdio streams.
const stdio = { input: process.stdin, output: process.stdout };
const terminal = readline.createInterface(stdio);
/**
 * Interactive REPL: reads a user line, appends it to the shared transcript,
 * runs the graph, prints the assistant reply, and loops forever.
 *
 * Fixes over the original:
 * - The readline interface is closed in `finally`; previously any rejection
 *   left stdin open, so the process logged the error and then hung.
 * - Blank input is skipped instead of spending an LLM round-trip on it.
 * - The fallback handler takes `unknown` instead of `any`.
 */
async function main(): Promise<void> {
  try {
    while (true) {
      const userInput = await terminal.question(`You: `);
      // Nothing to say — prompt again without calling the model.
      if (userInput.trim() === "") continue;
      messages.push({
        role: "user",
        content: userInput,
      });
      const { outputStream } = await graph.start({ messages });
      for await (const result of outputStream) {
        result.processResponse({
          Content: (response: GraphTypes.Content) => {
            console.log(`AI: ${response.content}`);
            // Record the reply so the next render includes it in the transcript.
            messages.push({
              role: "assistant",
              content: response.content,
            });
          },
          default: (data: unknown) => {
            console.error('Unprocessed response:', data);
          },
        });
      }
    }
  } finally {
    // Release stdin so the event loop can drain and the process can exit
    // if the loop ever throws.
    terminal.close();
  }
}
main().catch(console.error);