@@ -32,7 +32,7 @@ import { zhCN } from 'date-fns/locale'
 import { GameStatus } from './enums/game-status.enum'
 import dedent from 'dedent'
 import { Serialized } from 'langchain/dist/load/serializable'
-import { HumanMessage, LLMResult, SystemMessage } from 'langchain/schema'
+import { BaseMessageLike, HumanMessage, LLMResult, SystemMessage } from 'langchain/schema'
 import { CallbackManager } from 'langchain/callbacks'
 import { setTimeout } from 'timers/promises'
 import { EventsGateway } from '../events/events.gateway'
@@ -48,6 +48,13 @@ import { InitGame } from './dto/init-game.dto'
 export class GameService implements OnModuleInit {
   private readonly logger = new Logger(DanmuService.name)
   private llm: ChatOpenAI
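+  // One pre-built client per provider/model combination; callLLM() picks one at runtime.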
+  private gpt35: ChatOpenAI
+  private gpt4: ChatOpenAI
+  private gpt35CF: ChatOpenAI
+  private gpt4CF: ChatOpenAI
+  private gpt35Azure: ChatOpenAI
+  private gpt4Azure: ChatOpenAI
+
   private controllers: Map<number, RunningController> = new Map()

   constructor(
@@ -67,31 +74,79 @@ export class GameService implements OnModuleInit {

   async onModuleInit() {
     const self = this
-    this.llm = new ChatOpenAI({
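+    // Shared callback manager for all clients: logs every prompt, generation, and error.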
+    const cb = CallbackManager.fromHandlers({
+      async handleLLMStart(llm: Serialized, prompts: string[]) {
+        self.logger.log(`[LLM Start]LLM: ${JSON.stringify(llm)}`)
+        self.logger.log(`[LLM Start]Prompts: ${prompts.join('\n')}`)
+      },
+      async handleLLMEnd(output: LLMResult) {
+        self.logger.log(
+          `[LLM End]${output.generations
+            .reduce((acc, cur) => acc.concat(cur), [])
+            .map((i) => i.text)
+            .join('\n')}`
+        )
+        self.logger.log(`[LLM End]${JSON.stringify(output.llmOutput)}`)
+      },
+      async handleLLMError(error: Error) {
+        self.logger.error(error)
+      }
+    })
+
+    const onFailedAttempt = (error: Error) => {
+      self.logger.error(error)
+    }
+
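+    // Common settings for clients that call the OpenAI API directly.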
+    const baseParam = {
+      openAIApiKey: process.env.OPENAI_API_KEY,
+      modelName: 'gpt-3.5-turbo-0613',
+      callbackManager: cb,
+      onFailedAttempt
+    }
+
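+    // Common settings for clients routed through Azure OpenAI; the deployment
+    // name is overridden per model below.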
+    const azureBaseParam = {
       // openAIApiKey: process.env.OPENAI_API_KEY,
       // modelName: 'gpt-3.5-turbo-0613',
       azureOpenAIApiKey: process.env.AZURE_OPENAI_KEY,
       azureOpenAIApiVersion: process.env.AZURE_OPENAI_VERSION,
       azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_INSTANCE,
       azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_DEPLOYMENT,
-      callbackManager: CallbackManager.fromHandlers({
-        async handleLLMStart(llm: Serialized, prompts: string[]) {
-          self.logger.log(`[LLM Start]LLM: ${JSON.stringify(llm)}`)
-          self.logger.log(`['LLM Start]Prompts: ${prompts.join('\n')}`)
-        },
-        async handleLLMEnd(output: LLMResult) {
-          self.logger.log(
-            `[LLM End]${output.generations
-              .reduce((acc, cur) => acc.concat(cur), [])
-              .map((i) => i.text)
-              .join('\n')}`
-          )
-          self.logger.log(`[LLM End]${JSON.stringify(output.llmOutput)}`)
-        },
-        async handleLLMError(error: Error) {
-          Logger.error(error)
-        }
-      })
+      callbackManager: cb,
+      onFailedAttempt
+    }
+
+    this.llm = new ChatOpenAI({
+      ...azureBaseParam
+    })
+    this.gpt35 = new ChatOpenAI({
+      ...baseParam,
+      modelName: 'gpt-3.5-turbo-0613'
+    })
+    this.gpt4 = new ChatOpenAI({
+      ...baseParam,
+      modelName: 'gpt-4'
+    })
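+    // The *CF clients use an alternate base URL (presumably a Cloudflare proxy,
+    // per the CF suffix) instead of api.openai.com.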
+    this.gpt35CF = new ChatOpenAI({
+      ...baseParam,
+      modelName: 'gpt-3.5-turbo-0613',
+      configuration: {
+        baseURL: 'https://openai.c8c.top/v1'
+      }
+    })
+    this.gpt4CF = new ChatOpenAI({
+      ...baseParam,
+      modelName: 'gpt-4',
+      configuration: {
+        baseURL: 'https://openai.c8c.top/v1'
+      }
+    })
+    this.gpt35Azure = new ChatOpenAI({
+      ...azureBaseParam,
+      azureOpenAIApiDeploymentName: 'gpt-35-turbo'
+    })
+    this.gpt4Azure = new ChatOpenAI({
+      ...azureBaseParam,
+      azureOpenAIApiDeploymentName: 'gpt-4'
     })
   }

@@ -130,6 +185,38 @@ export class GameService implements OnModuleInit {
     return await paginate<Charactor>(this.charactorRepository, req.page, req.search)
   }

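+  // Resolve the 'model' system-config value to one of the pre-built clients and
+  // delegate the chat call to it; unknown values fall back to the default client.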
+  async callLLM(messages: BaseMessageLike[], options?, callbacks?) {
+    const { value: model } = await this.sysConfigService.findByName('model')
+    let llm: ChatOpenAI
+    switch (model) {
+      case 'gpt-3.5':
+        llm = this.gpt35
+        break
+      case 'gpt-4':
+        llm = this.gpt4
+        break
+      case 'cf-gpt-3.5':
+        llm = this.gpt35CF
+        break
+      case 'cf-gpt-4':
+        llm = this.gpt4CF
+        break
+      case 'azure-gpt-3.5':
+        llm = this.gpt35Azure
+        break
+      case 'azure-gpt-4':
+        llm = this.gpt4Azure
+        break
+      default:
+        llm = this.llm
+    }
+    return await llm.call(messages, options, callbacks)
+  }
+
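+  // Thin wrapper so game logic can broadcast events without touching the gateway directly.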
+  async emitEvent(id: string, event: any) {
+    this.eventsGateway.emitEvent(id, event)
+  }
+
   async genCharactor(background: string, num: number) {
     const parser = StructuredOutputParser.fromZodSchema(
       z.array(
@@ -153,7 +240,7 @@ export class GameService implements OnModuleInit {
       num,
       background
     })
-    const response = await this.llm.call([new HumanMessage(input)])
+    const response = await this.callLLM([new HumanMessage(input)])
     const output = await parser.parse(response.content)
     return output
   }
@@ -259,7 +346,7 @@ export class GameService implements OnModuleInit {
       charactors: this.formatCharactors(gameState.charactors),
       datetime: this.formatDatetime(gameState.date, gameState.time)
     })
-    const response = await this.llm.call([new HumanMessage(input)])
+    const response = await this.callLLM([new HumanMessage(input)])
     gameState.plot = response.content
   }

@@ -345,7 +432,7 @@ export class GameService implements OnModuleInit {
         : ''
     })

-    const response = await this.llm.call([new HumanMessage(input)])
+    const response = await this.callLLM([new HumanMessage(input)])
     plot = response.content

     subject.next({
@@ -437,7 +524,7 @@ export class GameService implements OnModuleInit {
       summary: summary
     })

-    const response = await this.llm.call([new HumanMessage(input)])
+    const response = await this.callLLM([new HumanMessage(input)])
     plot = response.content

     subject.next({
@@ -518,7 +605,7 @@ export class GameService implements OnModuleInit {
     while (!stop) {
       const history: GameState[] = await this.getHistory(game.currentState, [])
       const lastState = history[history.length - 1]
-      if (lastState.options && lastState.options.length > 0) {
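+      // Only auto-pick a random option when no choice carried over from the previous vote.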
+      if (!choice && lastState.options && lastState.options.length > 0) {
         choice = lastState.options[Math.floor(Math.random() * lastState.options.length)].content
       }
       const subject = new Subject<GameEvent>()
@@ -526,17 +613,13 @@ export class GameService implements OnModuleInit {
       const self = this
       subject.subscribe({
         async next(evt) {
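+          // Forward every game event to connected clients as soon as it is produced.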
+          self.emitEvent(`${id}`, evt)
           switch (evt.type) {
             case 'plot':
               self.logger.log('生成剧情')
               break
             case 'options':
-              self.makeVotes(
-                id,
-                evt.data.map((i) => i.content),
-                new Date(),
-                voteSubject
-              )
+              self.makeVotes(id, evt.data, new Date(), voteSubject)
               break
             case 'newCharactor':
               break
@@ -562,6 +645,7 @@ export class GameService implements OnModuleInit {
       }
       this.logger.log('等待投票结束')
       choice = await lastValueFrom(voteSubject)
+      this.logger.log(`Choice: ${choice}`)
       addCharactor = null
       if (lastState.charactors.filter((i) => !i.dead).length < 8) {
         const danmu = await this.danmuService.pickAsCharactor(id, lastState.createdAt)
@@ -581,23 +665,32 @@ export class GameService implements OnModuleInit {
       }
     }

-  async makeVotes(gameId: number, options: string[], startTime: Date, subject: Subject<string>) {
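+  // Takes full option objects (with .content) rather than plain strings; broadcasts
+  // the tally every second for ~30s, then pushes the winning option's content
+  // (ties broken at random) to the subject.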
+  async makeVotes(gameId: number, options: any[], startTime: Date, subject: Subject<string>) {
     try {
       this.logger.log('发起投票')
       let s = differenceInSeconds(new Date(), startTime)
-      while (s < 60) {
-        const votes = await this.danmuService.collectVotes(gameId, startTime, options.length)
+      while (s < 30) {
         await setTimeout(1000)
+        const votes = await this.danmuService.collectVotes(gameId, startTime, options.length)
+        this.emitEvent(`${gameId}`, { type: 'votes', data: votes })
         this.logger.log(`${s} 投票进度${JSON.stringify(votes)}`)
         s = differenceInSeconds(new Date(), startTime)
       }
       const votes = await this.danmuService.collectVotes(gameId, startTime, options.length)
+      this.emitEvent(`${gameId}`, { type: 'votes', data: votes })
       this.logger.log(`投票结束: ${JSON.stringify(votes)}`)
       const max = Math.max(...votes)
       const indices = votes.map((i, index) => (i === max ? index : -1)).filter((i) => i >= 0)
       const indexOfMaxValue = indices[Math.floor(Math.random() * indices.length)]
-      this.logger.log(`选择: ${indexOfMaxValue} - ${options[indexOfMaxValue]}`)
-      subject.next(options[indexOfMaxValue])
+      this.emitEvent(`${gameId}`, {
+        type: 'voteResult',
+        data: {
+          index: indexOfMaxValue,
+          option: options[indexOfMaxValue]
+        }
+      })
+      this.logger.log(`Choice: ${indexOfMaxValue} - ${JSON.stringify(options[indexOfMaxValue])}`)
+      subject.next(options[indexOfMaxValue].content)
     } catch (error) {
       this.logger.error(error)
       subject.error(error)
@@ -631,8 +724,8 @@ export class GameService implements OnModuleInit {
     const input = await prompt.format({
       text
     })
-    const response = await this.llm.call([new HumanMessage(input)])
-    const output = await parser.parse(response.content)
+    const response = await this.callLLM([new HumanMessage(input)])
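+    // Strip a stray '+' (which the model apparently sometimes prepends) before parsing.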
+    const output = await parser.parse(response.content.replace('+', ''))
     return output.map((i) => {
       i.modifyHp = i.modifyHp.filter((j) => charactors.find((k) => k.name === j.name))
       return i
@@ -686,7 +779,7 @@ export class GameService implements OnModuleInit {
         .join('\n\n')
       })
     }
-    const response = await this.llm.call([new HumanMessage(input)])
+    const response = await this.callLLM([new HumanMessage(input)])

     return response.content
   }
@@ -719,7 +812,7 @@ export class GameService implements OnModuleInit {
       base,
       background: game.background
     })
-    const response = await this.llm.call([
+    const response = await this.callLLM([
       new SystemMessage('你是一个富有想象力的写作助手,你的任务是帮我想象一个小说里的角色。'),
       new HumanMessage(input)
     ])
@@ -757,7 +850,7 @@ export class GameService implements OnModuleInit {
       summary,
       text: newPlot
     })
-    const response = await this.llm.call([
+    const response = await this.callLLM([
       new SystemMessage('你来扮演一个文字冒险游戏,你现在的任务是根据当前剧情随机调整角色的HP值'),
       new HumanMessage(input)
     ])