import 'dotenv/config';
import {
  CustomNode,
  GraphBuilder,
  GraphTypes,
  ProcessContext,
  RemoteLLMChatNode
} from '@inworld/runtime/graph';
const poemPrompt = 'Return ONLY a limerick about: ';
const reviewPrompt = 'Review these two poems and analyze which one is better.';
// Custom node that wraps the incoming topic in the poem prompt and builds an LLM chat request
class PoemPromptNode extends CustomNode {
  process(context: ProcessContext, input: string): GraphTypes.LLMChatRequest {
    const composedPrompt = poemPrompt + input;
    return new GraphTypes.LLMChatRequest({
      messages: [
        {
          role: 'user',
          content: composedPrompt,
        },
      ],
    });
  }
}
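// Custom node that combines the two generated poems into a single review prompt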
class ReviewPromptNode extends CustomNode {
  process(
    context: ProcessContext,
    poem1: GraphTypes.Content,
    poem2: GraphTypes.Content,
  ): GraphTypes.LLMChatRequest {
    const composedPrompt = `${reviewPrompt}\n\nPoem 1:\n\n${poem1.content}\n\nPoem 2:\n\n${poem2.content}`;
    return new GraphTypes.LLMChatRequest({
      messages: [
        {
          role: 'user',
          content: composedPrompt,
        },
      ],
    });
  }
}
// Instantiate the custom prompt nodes
const reviewPromptNode = new ReviewPromptNode({
  id: 'review-prompt-node',
});
const poemPromptNode = new PoemPromptNode({
  id: 'poem-prompt-node',
});
// Remote LLM nodes: OpenAI and Anthropic each write a poem in parallel, and Google judges the results
const openaiLLMNode = new RemoteLLMChatNode({
  id: 'openai-llm-node',
  modelName: 'gpt-4o-mini',
  provider: 'openai',
  textGenerationConfig: {
    maxNewTokens: 1000,
  },
  reportToClient: true,
});
const anthropicLLMNode = new RemoteLLMChatNode({
  id: 'anthropic-llm-node',
  modelName: 'claude-3-5-haiku-latest',
  provider: 'anthropic',
  textGenerationConfig: {
    maxNewTokens: 1000,
  },
  reportToClient: true,
});
const googleLLMNode = new RemoteLLMChatNode({
  id: 'google-llm-node',
  modelName: 'gemini-2.0-flash',
  provider: 'google',
  textGenerationConfig: {
    maxNewTokens: 1000,
  },
  reportToClient: true,
});
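// Fail fast if the API key is missing; this assumes the key is supplied via the
// INWORLD_API_KEY environment variable loaded by dotenv above.
if (!process.env.INWORLD_API_KEY) {
  throw new Error('INWORLD_API_KEY environment variable is not set');
}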
// Create a graph builder instance and add the nodes and edges: the poem prompt fans out to two LLMs,
// whose poems fan in to the review prompt, which feeds the reviewing LLM
const graphBuilder = new GraphBuilder({
  id: 'custom-text-node',
  apiKey: process.env.INWORLD_API_KEY,
  enableRemoteConfig: false
})
  .addNode(poemPromptNode)
  .addNode(reviewPromptNode)
  .addNode(openaiLLMNode)
  .addNode(anthropicLLMNode)
  .addNode(googleLLMNode)
  .addEdge(poemPromptNode, openaiLLMNode)
  .addEdge(poemPromptNode, anthropicLLMNode)
  .addEdge(anthropicLLMNode, reviewPromptNode)
  .addEdge(openaiLLMNode, reviewPromptNode)
  .addEdge(reviewPromptNode, googleLLMNode)
  .setStartNode(poemPromptNode)
  .setEndNode(googleLLMNode);
// Create an executor instance from the graph builder
const executor = graphBuilder.build();
// Render the graph topology to an image for inspection
executor.visualize('graph.png');
main();
// Main function that executes the graph
async function main() {
  // Execute the graph with the poem topic and iterate over the output stream
  const { outputStream } = executor.start('pizza');
  for await (const event of outputStream) {
    await event.processResponse({
      Content: (data: GraphTypes.Content) => {
        console.log(`\n${data.content}\n`);
      },
    });
  }
}