|
|
@@ -5,20 +5,28 @@ import dedent from 'dedent'
|
|
|
import { ConversationChain } from 'langchain/chains'
|
|
|
import { BufferMemory, BufferWindowMemory } from 'langchain/memory'
|
|
|
import { Logger } from '@nestjs/common'
|
|
|
-
|
|
|
+import { writeFile, mkdirSync, appendFileSync } from 'fs'
|
|
|
+import path = require('path')
|
|
|
+require('dotenv').config()
|
|
|
export function createLLM() {
|
|
|
const usage = { completionTokens: 0, promptTokens: 0, totalTokens: 0 }
|
|
|
const llm = new ChatOpenAI({
|
|
|
openAIApiKey: process.env.OPENAI_API_KEY,
|
|
|
- modelName: 'gpt-3.5-turbo-16k',
|
|
|
+ // openAIApiKey: '<REDACTED: a real API key was committed here — revoke it and load keys only from process.env>',
|
|
|
+ modelName: 'gpt-3.5-turbo-1106',
|
|
|
timeout: 1000 * 60 * 5,
|
|
|
configuration: {
|
|
|
baseURL: 'https://openai.c8c.top/v1'
|
|
|
},
|
|
|
+ maxRetries: 4,
|
|
|
callbackManager: CallbackManager.fromHandlers({
|
|
|
async handleLLMStart(llm, prompts) {
|
|
|
Logger.log(`[LLM Start]LLM: ${JSON.stringify(llm)}`)
|
|
|
Logger.log(`['LLM Start]Prompts: ${prompts.join('\n')}`)
|
|
|
+
|
|
|
+ const logFile = path.join(__dirname, 'llm.log')
|
|
|
+ appendFileSync(logFile, '\n--------------------------------------\n')
|
|
|
+ appendFileSync(logFile, prompts.join('\n'))
|
|
|
},
|
|
|
async handleLLMEnd(output) {
|
|
|
Logger.log(
|
|
|
@@ -31,13 +39,23 @@ export function createLLM() {
|
|
|
usage.completionTokens += output.llmOutput.tokenUsage.completionTokens
|
|
|
usage.promptTokens += output.llmOutput.tokenUsage.promptTokens
|
|
|
usage.totalTokens += output.llmOutput.tokenUsage.totalTokens
|
|
|
+
|
|
|
+ const logFile = path.join(__dirname, 'llm.log')
|
|
|
+ appendFileSync(logFile, '\n--------------------------------------\n')
|
|
|
+ appendFileSync(
|
|
|
+ logFile,
|
|
|
+ output.generations
|
|
|
+ .reduce((acc, cur) => acc.concat(cur), [])
|
|
|
+ .map((i) => i.text)
|
|
|
+ .join('\n')
|
|
|
+ )
|
|
|
},
|
|
|
async handleLLMError(error) {
|
|
|
Logger.error(error)
|
|
|
}
|
|
|
}),
|
|
|
onFailedAttempt(error) {
|
|
|
- Logger.error(error)
|
|
|
+ Logger.error(error.message, error.stack, 'OpenAI API Error')
|
|
|
}
|
|
|
|
|
|
// configuration: {
|
|
|
@@ -45,14 +63,14 @@ export function createLLM() {
|
|
|
// },
|
|
|
})
|
|
|
|
|
|
- function conversation(system) {
|
|
|
+ function conversation(system, k = 4) {
|
|
|
const chatPrompt = ChatPromptTemplate.fromMessages([
|
|
|
['system', system],
|
|
|
new MessagesPlaceholder('history'),
|
|
|
['human', '{input}']
|
|
|
])
|
|
|
const memory = new BufferWindowMemory({
|
|
|
- k: 4,
|
|
|
+ k,
|
|
|
memoryKey: 'history',
|
|
|
returnMessages: true
|
|
|
})
|