@@ -9,31 +9,24 @@ import { writeFile, mkdirSync, appendFileSync } from 'fs'
 import path = require('path')
 import { LLMTools } from './types'
 require('dotenv').config()
-export function createLLM(model = 'gpt-3.5-turbo-16k') : LLMTools {
+export function createLLM(options?: {
+  model?: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-4' | 'gpt-4-32k'
+  provider?: 'azure' | 'openai'
+}): LLMTools {
+  options = options || {
+    model: 'gpt-3.5-turbo',
+    provider: 'azure'
+  }
+  options.model = options.model || 'gpt-3.5-turbo'
+  options.provider = options.provider || 'azure'
   const usage = { completionTokens: 0, promptTokens: 0, totalTokens: 0 }
-  const llm = new ChatOpenAI({
-    openAIApiKey: process.env.OPENAI_API_KEY,
-    modelName: model,
-
-    // azureOpenAIApiKey: process.env.AZURE_OPENAI_KEY,
-    // azureOpenAIApiVersion: process.env.AZURE_OPENAI_VERSION,
-    // azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_INSTANCE,
-    // azureOpenAIApiDeploymentName: 'gpt-35-turbo-16k',
-    // azureOpenAIBasePath:`https://gateway.ai.cloudflare.com/v1/cc11bed478e65817d30878b0a796422d/openai-gateway/azure-openai/${process.env.AZURE_OPENAI_INSTANCE}`,
-
-    timeout: 1000 * 60 * 5,
-    configuration: {
-      baseURL: process.env.OPENAI_BASE_URL
-    },
+  const commonParams = {
     maxRetries: 4,
+    timeout: 1000 * 60 * 2,
     callbackManager: CallbackManager.fromHandlers({
      async handleLLMStart(llm, prompts) {
        Logger.log(`[LLM Start]LLM: ${JSON.stringify(llm)}`)
        Logger.log(`['LLM Start]Prompts: ${prompts.join('\n')}`)
-
-        const logFile = path.join(__dirname, 'llm.log')
-        appendFileSync(logFile, '\n--------------------------------------\n')
-        appendFileSync(logFile, prompts.join('\n'))
      },
      async handleLLMEnd(output) {
        Logger.log(
@@ -46,16 +39,6 @@ export function createLLM(model = 'gpt-3.5-turbo-16k') : LLMTools {
        usage.completionTokens += output.llmOutput.tokenUsage.completionTokens
        usage.promptTokens += output.llmOutput.tokenUsage.promptTokens
        usage.totalTokens += output.llmOutput.tokenUsage.totalTokens
-
-        const logFile = path.join(__dirname, 'llm.log')
-        appendFileSync(logFile, '\n--------------------------------------\n')
-        appendFileSync(
-          logFile,
-          output.generations
-            .reduce((acc, cur) => acc.concat(cur), [])
-            .map((i) => i.text)
-            .join('\n')
-        )
      },
      async handleLLMError(error) {
        Logger.error(error)
@@ -64,11 +47,49 @@ export function createLLM(model = 'gpt-3.5-turbo-16k') : LLMTools {
    onFailedAttempt(error) {
      Logger.error(error.message, error.stack, 'OpenAI API Error')
    }
+  }
+  let llm
+  if (options.provider === 'openai') {
+    llm = new ChatOpenAI({
+      ...commonParams,
+      openAIApiKey: process.env.OPENAI_API_KEY,
+      modelName: options.model,
+
+      // azureOpenAIApiKey: process.env.AZURE_OPENAI_KEY,
+      // azureOpenAIApiVersion: process.env.AZURE_OPENAI_VERSION,
+      // azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_INSTANCE,
+      // azureOpenAIApiDeploymentName: 'gpt-35-turbo-16k',
+      // azureOpenAIBasePath:`https://gateway.ai.cloudflare.com/v1/cc11bed478e65817d30878b0a796422d/openai-gateway/azure-openai/${process.env.AZURE_OPENAI_INSTANCE}`,

-    // configuration: {
-    //   baseURL: "https://openai.c8c.top/v1",
-    // },
-  })
+      configuration: {
+        baseURL: process.env.OPENAI_BASE_URL
+      }
+    })
+  } else {
+    let azureDeployment = 'gpt-35'
+    switch (options.model) {
+      case 'gpt-3.5-turbo':
+        azureDeployment = 'gpt-35-turbo'
+        break
+      case 'gpt-3.5-turbo-16k':
+        azureDeployment = 'gpt-35-turbo-16k'
+        break
+      case 'gpt-4':
+        azureDeployment = 'gpt-4'
+        break
+      case 'gpt-4-32k':
+        azureDeployment = 'gpt-4-32k'
+        break
+    }
+    llm = new ChatOpenAI({
+      ...commonParams,
+      azureOpenAIApiKey: process.env.AZURE_OPENAI_KEY,
+      azureOpenAIApiVersion: process.env.AZURE_OPENAI_VERSION,
+      azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_INSTANCE,
+      azureOpenAIApiDeploymentName: azureDeployment,
+      azureOpenAIBasePath: `https://gateway.ai.cloudflare.com/v1/cc11bed478e65817d30878b0a796422d/openai-gateway/azure-openai/${process.env.AZURE_OPENAI_INSTANCE}`
+    })
+  }

  function conversation(system, k = 4) {
    const chatPrompt = ChatPromptTemplate.fromMessages([