-
-
Notifications
You must be signed in to change notification settings - Fork 19
/
Copy pathindex.ts
112 lines (99 loc) · 4.1 KB
/
index.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
// Resolve the project root once; all file paths below are built relative to it.
const projectRootDir = getProjectRoot();
// Load .env into process.env (must run before any process.env reads below).
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template (read synchronously at startup; contents become the system prompt)
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands (handles '/'-prefixed input in the main loop)
// NOTE(review): the `CommandHandler` type is not imported here — presumably declared
// globally or ambiently elsewhere in the project; verify.
const commandHandler: CommandHandler = createCommandHandler();
// Callback manager that streams model output to the console token-by-token.
const callbackManager = CallbackManager.fromHandlers({
  // This function is called when the LLM generates a new token (i.e., a prediction for the next word)
  async handleLLMNewToken(token: string) {
    // Write the token to the output stream (i.e., the console)
    output.write(token);
  },
});
// Chat model with token streaming enabled; model name comes from the MODEL env
// var (loaded via dotenv above), defaulting to gpt-3.5-turbo.
const llm = new OpenAIChat({
  streaming: true,
  callbackManager,
  modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
// System prompt built from src/prompt.txt; oneLine collapses it to a single line.
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
  ${systemPromptTemplate}
`);
// Full chat prompt: system message followed by the user's question,
// wrapped in triple quotes to delimit it clearly for the model.
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
  systemPrompt,
  HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
// Sliding-window conversation memory shared with the main loop below.
const windowMemory = getBufferWindowMemory();
// The chain ties prompt, memory, and model together; invoked per user turn.
const chain = new LLMChain({
  prompt: chatPrompt,
  memory: windowMemory,
  llm,
});
/*
 * Main REPL loop: read a line from the user, dispatch '/'-prefixed input to the
 * command handler, otherwise run the sanitized question through the LLM chain
 * (with retrieved context and memory), store the Q/A pair, and log the exchange.
 */
// eslint-disable-next-line no-constant-condition
while (true) {
  output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
  const userInput = await rl.question('> ');
  let response;
  if (userInput.startsWith('/')) {
    // Slash-command: strip the leading '/' and split into command name + args.
    const [command, ...args] = userInput.slice(1).split(' ');
    await commandHandler.execute(command, args, output);
  } else {
    const memoryVectorStore = await getMemoryVectorStore();
    const contextVectorStore = await getContextVectorStore();
    const question = sanitizeInput(userInput);
    const config = getConfig();
    // Retrieve the most relevant documents from the context and memory stores
    // (counts are configurable).
    const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
    const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
    try {
      response = await chain.call({
        input: question,
        context,
        history,
        immediate_history: config.useWindowMemory ? windowMemory : '',
      });
      if (response) {
        // Persist the Q/A pair so future turns can retrieve it as memory.
        await addDocumentsToMemoryVectorStore([
          { content: question, metadataType: 'question' },
          { content: response.text, metadataType: 'answer' },
        ]);
        // FIX: LLMChain results expose the completion as `text` (as used above);
        // `response.response` was undefined and logged an empty answer.
        await logChat(chatLogDirectory, question, response.text);
      }
    } catch (error) {
      if (error instanceof Error && error.message.includes('Cancel:')) {
        // TODO: Handle cancel
      } else if (error instanceof Error) {
        output.write(chalk.red(error.message));
      } else {
        // `error` is `unknown` here; stringify explicitly before printing.
        output.write(chalk.red(String(error)));
      }
    }
  }
  output.write('\n');
}