-
Notifications
You must be signed in to change notification settings - Fork 1
/
shouldReply.js
81 lines (68 loc) · 2.28 KB
/
shouldReply.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { OllamaFunctions } from "langchain/experimental/chat_models/ollama_functions";
import { JsonOutputFunctionsParser } from "langchain/output_parsers";
import { PromptTemplate } from "@langchain/core/prompts";
import { getLastXMessages } from "./src/memory/chatlogFunctions.js";
/**
 * Fetch the last `k` messages for the given source and flatten them into a
 * single newline-separated transcript, one "name: message" entry per line.
 *
 * NOTE(review): despite the name, the count is controlled by `k`, not fixed
 * at three; the name is kept to avoid breaking the caller below.
 *
 * @param {string} id - Channel or conversation id to pull history from.
 * @param {number} k - How many of the most recent messages to fetch.
 * @param {string} channelType - Source type forwarded to getLastXMessages (e.g. "channel").
 * @returns {Promise<string>} Newline-joined "name: clean_content" lines.
 */
async function getLastThreeMessages(id, k, channelType) {
  const history = await getLastXMessages(id, k, channelType);
  const lines = [];
  for (const entry of history) {
    lines.push(`${entry.name}: ${entry.clean_content}`);
  }
  return lines.join("\n");
}
// Prompt template: asks the LLM to decide, from a chat transcript, whether
// the bot persona "Tensor" should send the next message. The model is told
// to answer as JSON ({"shouldreply": true|false}); {input} is substituted
// with the formatted transcript at invoke time.
const ANALYSIS_TEMPLATE = `Analyze the following passage and determine if "Tensor" should reply next. Use the following criteria to make the decision:
Criteria:
1. If Tensor was asked a question.
2. If the conversation is between two people and its Tensor's turn to talk.
3. If Tensor was directly mentioned.
The response should be a JSON object with the key "shouldreply" and the value should be either true or false.
Passage:
{input}
`;
// Wrap the raw template so LangChain can substitute the {input} variable.
const prompt = PromptTemplate.fromTemplate(ANALYSIS_TEMPLATE);
// Zod schema for the expected function-call arguments: a single boolean flag.
// The key "shouldreply" must match what the response-handling code reads below.
const schema = z.object({
  shouldreply: z.boolean().describe("Whether the person should reply next"),
});
// Ollama chat model constrained to always emit a should_reply function call.
// Fix: pass the full zodToJsonSchema output as `parameters` instead of
// rebuilding an object from only `.properties` — the original dropped the
// generated `required` array, so the model was never told that
// `shouldreply` is a mandatory field.
const model = new OllamaFunctions({
  temperature: 0.1, // near-deterministic: this is a yes/no classification
  model: "llama3",
}).bind({
  functions: [
    {
      name: "should_reply",
      description: "Determines if the person should reply next.",
      // Complete JSON Schema derived from the zod schema
      // (type, properties, required, additionalProperties).
      parameters: zodToJsonSchema(schema),
    },
  ],
  // Force the model to call should_reply rather than reply with free text.
  function_call: {
    name: "should_reply",
  },
});
// Compose the pipeline: prompt -> model -> JSON function-call parser.
// Fixes: (1) `.pipe()` is synchronous Runnable composition, not a Promise,
// so the original `await` on the chain construction was spurious;
// (2) JsonOutputFunctionsParser's constructor takes an optional config
// ({ argsOnly }), not a zod schema — the schema argument was silently ignored.
const chain = prompt.pipe(model).pipe(new JsonOutputFunctionsParser());

// Pull the last 3 messages from the target channel to use as context.
const messages = await getLastThreeMessages(
  "1148253350404575352",
  3,
  "channel"
);

// Ask the model whether Tensor should speak next, given the transcript.
const response = await chain.invoke({
  input: messages,
});

console.log(messages);
console.log("\nShould Tensor reply next?");
if (response.shouldreply) {
  console.log("Yes, Tensor should reply next.");
} else {
  console.log("No, Tensor should not reply next.");
}