-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path chat.js
More file actions
60 lines (52 loc) · 1.49 KB
/
chat.js
File metadata and controls
60 lines (52 loc) · 1.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import { BedrockChat } from "@langchain/community/chat_models/bedrock";
import { RedisChatMessageHistory } from "@langchain/redis";
//import { ValkeyChatMessageHistory } from "./valkey_chat_history.js";
import { ConversationChain } from "langchain/chains";
import { BufferMemory } from "langchain/memory";
import prompt from "prompt";
import {
ChatPromptTemplate,
MessagesPlaceholder,
} from "@langchain/core/prompts";
// Prompt template for each turn: a fixed system message, the accumulated
// conversation history, then the user's latest input. The "chat_history"
// placeholder name must match the memory's memoryKey.
const systemMessage = [
  "system",
  "The following is a friendly conversation between a human and an AI.",
];
const chatPrompt = ChatPromptTemplate.fromMessages([
  systemMessage,
  new MessagesPlaceholder("chat_history"),
  ["human", "{input}"],
]);
// Conversation memory backed by Redis on localhost:6379. Each process run
// gets a fresh session keyed by its start timestamp; entries expire after
// a 300-second TTL.
// NOTE: to target Valkey instead, swap in ValkeyChatMessageHistory from
// ./valkey_chat_history.js (see the commented import at the top of the file).
const chatHistory = new RedisChatMessageHistory({
  sessionId: new Date().toISOString(),
  sessionTTL: 300,
  host: "localhost",
  port: 6379,
});
const memory = new BufferMemory({
  chatHistory,
  returnMessages: true,
  memoryKey: "chat_history",
});
// Bedrock chat model configuration (Claude 3 Sonnet in us-east-1).
// The anthropic_version key is passed through to Bedrock via modelKwargs.
const model = "anthropic.claude-3-sonnet-20240229-v1:0";
const region = "us-east-1";
const langchainBedrockChatModel = new BedrockChat({
  model,
  region,
  modelKwargs: {
    anthropic_version: "bedrock-2023-05-31",
  },
});
// Wire the model, Redis-backed memory, and prompt template into a single
// conversational chain. Pass verbose: true here to trace each full prompt.
const chain = new ConversationChain({
  llm: langchainBedrockChatModel,
  memory,
  prompt: chatPrompt,
});
// Interactive REPL: read a "message" line from stdin, run it through the
// chain, and print the chain's result object.
// prompt.start() initializes the prompt library once — the original called
// it on every loop iteration, which is redundant. noHandleSIGINT leaves
// Ctrl-C to the default handler so the process remains interruptible.
prompt.start({ noHandleSIGINT: true });
while (true) {
  const { message } = await prompt.get(["message"]);
  const response = await chain.invoke({
    input: message,
  });
  console.log(response);
}