xiongzhu, 2 years ago
Parent commit e931d76c6a
7 files changed, 194 additions and 209 deletions
  1. + 0 - 15     g.mjs
  2. + 49 - 0     llm.mjs
  3. + 0 - 149    pem.mjs
  4. + 0 - 4      sdfsdf.mjs
  5. + 9 - 0      src/game/game.controller.ts
  6. + 134 - 41   src/game/game.service.ts
  7. + 2 - 0      src/room/room.controller.ts

+ 0 - 15
g.mjs

@@ -1,15 +0,0 @@
-#!/usr/bin/env node
-import fs from 'fs'
-import path from 'path'
-import inquirer from 'inquirer'
-import { input, checkbox } from '@inquirer/prompts'
-
-const name = await input({
-    message: 'What is your name?',
-    initial: 'John Doe'
-})
-const items = await checkbox({
-    message: 'Select toppings',
-    choices: ['foo', 'bar', 'baz']
-})
-console.log(`Hello ${name}!`)

+ 49 - 0
llm.mjs

@@ -0,0 +1,49 @@
+import { ChatOpenAI } from 'langchain/chat_models/openai'
+import fs from 'fs'
+const llm = new ChatOpenAI({
+    openAIApiKey: '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',
+    modelName: 'gpt-3.5-turbo-0613',
+    // azureOpenAIApiKey: '62dd8a1466524c64967810c692f0197e',
+    // azureOpenAIApiVersion: '2023-03-15-preview',
+    // azureOpenAIApiInstanceName: 'zouma1',
+    // azureOpenAIApiDeploymentName: 'gpt-35-turbo',
+    maxRetries: 3,
+    configuration: {
+        baseURL: 'https://openai.c8c.top/v1'
+    },
+    onFailedAttempt: (error) => {
+        console.log(error.code)
+    }
+})
+
+try {
+    const response = await llm.call(
+        [
+            `您的任务是为我的小说剧情产生一个最终的摘要
+我们已经提供了一个到某个剧情节点的摘要: "在10月8日晚上,约翰成功逃脱了一个致命的陷阱,并揭示了黑人岛的秘密组织。他发现组织计划在10月9日使用毒药,并决定利用爆炸物摧毁实验室。虽然受到了特工的追捕,但他成功地返回岛上并警告其他宾客。随着他们的团结行动,实验室被摧毁,组织的阴谋被阻止。10月10日下午,艾米丽发现了一辆藏匿在岛上的秘密列车,并决定和其他宾客潜入列车,寻找更多线索。
+
+他们来到了列车尽头的车厢,发现一间隐藏房间,里面摆满了各种神秘设备。伊莎贝拉发现了一本笔记,上面详细记录了组织计划在全球范围内发动一系列恐怖袭击。亚历山大辨别出笔记上的手写字迹中隐藏着一个名字:"亨德里克森"。艾米丽立刻想到之前一名追捕她们的特工,伊莎贝拉勉强钦定了他为卧底。他们决心找到亨德里克森并阻止恐怖袭击的发生。
+
+10月10日晚上,艾米丽和伊莎贝拉融入了恐怖分子的集会,暗中侦查,寻找关键情报。突然,一个小女孩走向她们,递给她们一个笔记本,上面写着:“这个集会是一个陷阱,你们需要离开。”艾米丽和伊莎贝拉急忙转身,却发现集会已被锁住。惊慌失措中,他们注意到聚光灯瞄向他们,突然,枪声响起!一连串的枪击声中,艾米丽和伊莎贝拉奋力回击。在血雨腥风中,她们成功逃脱集会,但却不知道真正的陷阱即将到来。
+
+10月11日上午,亚历山大潜入组织总部的一楼实验室,发现了一批正在研发的新型毒气。他决定找到控制室,关闭实验室门,阻止毒气散播。艾米丽、威廉和伊莎贝拉在外面保护他。亚历山大穿越走廊,躲避巡逻的特工,终于进入控制室。他成功关闭了门,但突然触发了一个警报,组织的特工们开始围攻控制室。亚历山大抵御着攻击,艾米丽、威廉和伊莎贝拉赶到,提供支援。在惊心动魄的枪战中,他们勉力保护亚历山大,最终击退了特工,控制室恢复安静。他们急忙寻找关键文件,却发现亨德里克森已经提前离开。他们决定追踪他的下落,揭开更大的阴谋。
+
+10月11日下午,艾米丽、威廉、伊莎贝拉和亚历山大在厅堂中发现了一扇隐藏的地下室门。进入地下室后,他们发现里面摆满了锁链和刑具,墙壁上涂满了血迹。探索地下室时,他们发现了一个被组织抓来进行实验的男子。男子告诉他们更多组织的恶行,还提及有着一个更大的计划。他们决定帮助男子逃脱,揭露更多的真相。然而,特工们围住了地下室,他们必须想办法逃脱。
+
+经过艰辛调查和惊险行动,他们最终击败了特工并成功逮捕了亨德里克森。在他们彻底揭露组织的真相后,他们"
+现在,我们提供了一些新的剧情信息,可以用来完善原始摘要(如果有用的话)
+------------
+"10月11日晚上
+亚历山大和组织首领的决斗开始了,没人知道他们将如何收场。同时,艾米丽、威廉和伊莎贝拉被迫与亨德里克森激战,以保护他们的生命和揭露真相。战斗中,亚历山大的洞察力让他发现了首领的弱点,并成功将其击败。然而,就在他们以为胜利在望时,一枚定时炸弹被激活。伊莎贝拉紧急分析炸弹结构,却发现解除密码紧缺。三人必须快速决定,是拆除炸弹还是逃跑!"
+------------
+根据新的剧情,完善原始摘要,尽量完整包含所有情节内容,省略与情节无关的内容,最终的摘要不要太长
+如果新的剧情没有用处,返回原始摘要。
+最终的摘要:`
+        ],
+        { maxRetries: 3 }
+    )
+    console.log(response.content)
+} catch (error) {
+    console.log(JSON.stringify(error, null, 4))
+    console.log(error.code)
+}
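
The same client setup can be exercised without the hard-coded key. A minimal sketch in the same style, assuming the key is supplied via OPENAI_API_KEY (already used elsewhere in this repo); OPENAI_BASE_URL is an assumed override, not part of this commit:

import { ChatOpenAI } from 'langchain/chat_models/openai'

// Same options as above, but the secret comes from the environment.
const llm = new ChatOpenAI({
    openAIApiKey: process.env.OPENAI_API_KEY,
    modelName: 'gpt-3.5-turbo-0613',
    maxRetries: 3,
    configuration: {
        // OPENAI_BASE_URL is an assumed variable name; fall back to the proxy used above
        baseURL: process.env.OPENAI_BASE_URL ?? 'https://openai.c8c.top/v1'
    }
})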

+ 0 - 149
pem.mjs

@@ -1,149 +0,0 @@
-import pem from 'pem'
-import { readFileSync } from 'fs'
-import crypto from 'crypto'
-import axios from 'axios'
-import { format, addSeconds } from 'date-fns'
-import qs from 'querystring'
-
-const privateKey = crypto.createPrivateKey({ key: readFileSync('certs/6888806043057.key') })
-const publicKey = crypto.createPublicKey({ key: readFileSync('certs/sand.crt') })
-
-function privSign(str) {
-    const signer = crypto.createSign('SHA1')
-    signer.update(str)
-    signer.end()
-    const signature = signer.sign(privateKey)
-    return signature.toString('base64')
-}
-
-function verifySign(str, sign) {
-    const verifier = crypto.createVerify('SHA1')
-    verifier.update(str)
-    verifier.end()
-    return verifier.verify(publicKey, Buffer.from(sign.replace(/\s/g, '+'), 'base64'))
-}
-
-async function request(header, body, url) {
-    const data = {
-        head: header,
-        body
-    }
-    const dataStr = JSON.stringify(data)
-    console.log(JSON.stringify(data, null, 4))
-    const sign = await privSign(dataStr)
-    const postBody = {
-        charset: 'UTF-8',
-        data: dataStr,
-        signType: '01',
-        sign,
-        extend: ''
-    }
-    console.log(qs.stringify(postBody))
-    return await axios.post(url, qs.stringify(postBody), {
-        headers: {
-            'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
-        },
-        timeout: 30000,
-        timeoutErrorMessage: '请求超时'
-    })
-}
-const algorithm = 'aes-128-ecb'
-async function pay() {
-    const data = {
-        cityNo: '',
-        productId: '00000004',
-        bankType: '',
-        payMode: '',
-        accNo: '6222024301070380165',
-        accName: '熊竹',
-        bankName: '',
-        remark: '消费',
-        channelType: '',
-        accAttr: '0',
-        version: '01',
-        timeOut: '20230506150701',
-        extend: '',
-        extendParams: '',
-        tranTime: '20230506150401',
-        provNo: '',
-        phone: '',
-        tranAmt: '000000000100',
-        reqReserved: '',
-        orderCode: new Date().getTime() + '',
-        accType: '4',
-        currencyCode: '156'
-    }
-    const dataStr = JSON.stringify(data)
-
-    // Encryption key: must be 16, 24 or 32 bytes long (AES-128, AES-192 or AES-256 respectively)
-    const key = crypto.randomBytes(16)
-
-    // The cipher used is AES-128-ECB (the `algorithm` constant above)
-
-    // Convert the data to encrypt into a Buffer object
-    const dataBuffer = Buffer.from(dataStr, 'utf8')
-
-    // Create a cipher object, padding with PKCS5-style padding
-    const cipher = crypto.createCipheriv(algorithm, key, null)
-    cipher.setAutoPadding(true)
-
-    // Encrypt the data and return the encrypted Buffer
-    const encryptedBuffer = Buffer.concat([cipher.update(dataBuffer), cipher.final()])
-
-    // Convert the encrypted Buffer to a Base64 string
-    const encrypted = encryptedBuffer.toString('base64')
-
-    const sign = await privSign(dataStr)
-
-    console.log(encrypted)
-
-    const encryptKey = crypto
-        .publicEncrypt(
-            {
-                key: publicKey,
-                padding: crypto.constants.RSA_PKCS1_PADDING
-            },
-            key
-        )
-        .toString('base64')
-    console.log(encryptKey)
-
-    const body = {
-        transCode: 'RTPM',
-        accessType: '0',
-        merId: '6888806043057',
-        encryptKey: encryptKey,
-        encryptData: encrypted,
-        sign,
-        extend: ''
-    }
-    const res = await axios.post('http://120.78.171.194:11223/agent-main/openapi/agentpay', qs.stringify(body))
-
-    const retData = qs.parse(qs.unescape(res.data))
-    console.log(retData)
-
-    const decryptKey = crypto.privateDecrypt(
-        {
-            key: privateKey,
-            padding: crypto.constants.RSA_PKCS1_PADDING
-        },
-        Buffer.from(retData.encryptKey.replace(/\s/g, '+'), 'base64')
-    )
-    console.log(decryptKey.toString())
-
-    const decipher = crypto.createDecipheriv(algorithm, decryptKey, null)
-    decipher.setAutoPadding(true)
-
-    const decryptedBuffer = Buffer.concat([
-        decipher.update(Buffer.from(retData.encryptData.replace(/\s/g, '+'), 'base64')),
-        decipher.final()
-    ])
-    const decrypted = decryptedBuffer.toString('utf8')
-    console.log(decrypted)
-
-    const verify = verifySign(decrypted, retData.sign)
-    console.log(verify)
-
-
-}
-await pay()
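
For reference, the encryption core of the deleted script is a standard AES-128-ECB round trip with PKCS#7 padding; a minimal self-contained sketch (random key and dummy payload for illustration only, not the payment flow):

import crypto from 'crypto'

// aes-128-ecb requires a 16-byte key; ECB mode takes no IV, so it is passed as null
const key = crypto.randomBytes(16)
const plaintext = Buffer.from(JSON.stringify({ demo: true }), 'utf8')

const cipher = crypto.createCipheriv('aes-128-ecb', key, null)
cipher.setAutoPadding(true) // PKCS#7, the Node equivalent of Java's "PKCS5Padding"
const encrypted = Buffer.concat([cipher.update(plaintext), cipher.final()]).toString('base64')

const decipher = crypto.createDecipheriv('aes-128-ecb', key, null)
decipher.setAutoPadding(true)
const decrypted = Buffer.concat([
    decipher.update(Buffer.from(encrypted, 'base64')),
    decipher.final()
]).toString('utf8')

console.log(decrypted === plaintext.toString('utf8')) // true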

+ 0 - 4
sdfsdf.mjs

@@ -1,4 +0,0 @@
-import { format } from 'date-fns'
-import zh from 'date-fns/locale/zh-CN/index.js'
-
-console.log(format(new Date(), 'MMMdo', { locale: zh }))

+ 9 - 0
src/game/game.controller.ts

@@ -25,6 +25,7 @@ export class GameController {
     }
 
     @Get('/:id')
+    @Public()
     public async get(@Param('id') id: string) {
         return await this.gameService.findById(Number(id))
     }
@@ -55,6 +56,14 @@ export class GameController {
         @Body() body: { choice: string; genChoice: boolean; addCharactor: string }
     ) {
         const subject = new Subject<any>()
+        const self = this
+        subject.subscribe({
+            next: (value) => {
+                self.gameService.emitEvent(id, value)
+            },
+            error: (err) => console.log(err),
+            complete: () => console.log('complete')
+        })
         return await this.gameService.continue(Number(id), body.choice, body.genChoice, body.addCharactor, subject)
     }
 

+ 134 - 41
src/game/game.service.ts

@@ -32,7 +32,7 @@ import { zhCN } from 'date-fns/locale'
 import { GameStatus } from './enums/game-status.enum'
 import dedent from 'dedent'
 import { Serialized } from 'langchain/dist/load/serializable'
-import { HumanMessage, LLMResult, SystemMessage } from 'langchain/schema'
+import { BaseMessageLike, HumanMessage, LLMResult, SystemMessage } from 'langchain/schema'
 import { CallbackManager } from 'langchain/callbacks'
 import { setTimeout } from 'timers/promises'
 import { EventsGateway } from '../events/events.gateway'
@@ -48,6 +48,13 @@ import { InitGame } from './dto/init-game.dto'
 export class GameService implements OnModuleInit {
     private readonly logger = new Logger(DanmuService.name)
     private llm: ChatOpenAI
+    private gpt35: ChatOpenAI
+    private gpt4: ChatOpenAI
+    private gpt35CF: ChatOpenAI
+    private gpt4CF: ChatOpenAI
+    private gpt35Azure: ChatOpenAI
+    private gpt4Azure: ChatOpenAI
+
     private controllers: Map<number, RunningController> = new Map()
 
     constructor(
@@ -67,31 +74,79 @@ export class GameService implements OnModuleInit {
 
     async onModuleInit() {
         const self = this
-        this.llm = new ChatOpenAI({
+        const cb = CallbackManager.fromHandlers({
+            async handleLLMStart(llm: Serialized, prompts: string[]) {
+                self.logger.log(`[LLM Start]LLM: ${JSON.stringify(llm)}`)
+                self.logger.log(`[LLM Start]Prompts: ${prompts.join('\n')}`)
+            },
+            async handleLLMEnd(output: LLMResult) {
+                self.logger.log(
+                    `[LLM End]${output.generations
+                        .reduce((acc, cur) => acc.concat(cur), [])
+                        .map((i) => i.text)
+                        .join('\n')}`
+                )
+                self.logger.log(`[LLM End]${JSON.stringify(output.llmOutput)}`)
+            },
+            async handleLLMError(error: Error) {
+                self.logger.error(error)
+            }
+        })
+
+        const onFailedAttempt = (error) => {
+            self.logger.error(error)
+        }
+
+        const baseParam = {
+            openAIApiKey: process.env.OPENAI_API_KEY,
+            modelName: 'gpt-3.5-turbo-0613',
+            callbackManager: cb,
+            onFailedAttempt
+        }
+
+        const azureBaseParam = {
             // openAIApiKey: process.env.OPENAI_API_KEY,
             // modelName: 'gpt-3.5-turbo-0613',
             azureOpenAIApiKey: process.env.AZURE_OPENAI_KEY,
             azureOpenAIApiVersion: process.env.AZURE_OPENAI_VERSION,
             azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_INSTANCE,
             azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_DEPLOYMENT,
-            callbackManager: CallbackManager.fromHandlers({
-                async handleLLMStart(llm: Serialized, prompts: string[]) {
-                    self.logger.log(`[LLM Start]LLM: ${JSON.stringify(llm)}`)
-                    self.logger.log(`['LLM Start]Prompts: ${prompts.join('\n')}`)
-                },
-                async handleLLMEnd(output: LLMResult) {
-                    self.logger.log(
-                        `[LLM End]${output.generations
-                            .reduce((acc, cur) => acc.concat(cur), [])
-                            .map((i) => i.text)
-                            .join('\n')}`
-                    )
-                    self.logger.log(`[LLM End]${JSON.stringify(output.llmOutput)}`)
-                },
-                async handleLLMError(error: Error) {
-                    Logger.error(error)
-                }
-            })
+            callbackManager: cb,
+            onFailedAttempt
+        }
+
+        this.llm = new ChatOpenAI({
+            ...azureBaseParam
+        })
+        this.gpt35 = new ChatOpenAI({
+            ...baseParam,
+            modelName: 'gpt-3.5-turbo-0613'
+        })
+        this.gpt4 = new ChatOpenAI({
+            ...baseParam,
+            modelName: 'gpt-4'
+        })
+        this.gpt35CF = new ChatOpenAI({
+            ...baseParam,
+            modelName: 'gpt-3.5-turbo-0613',
+            configuration: {
+                baseURL: 'https://openai.c8c.top/v1'
+            }
+        })
+        this.gpt4CF = new ChatOpenAI({
+            ...baseParam,
+            modelName: 'gpt-4',
+            configuration: {
+                baseURL: 'https://openai.c8c.top/v1'
+            }
+        })
+        this.gpt35Azure = new ChatOpenAI({
+            ...azureBaseParam,
+            azureOpenAIApiDeploymentName: 'gpt-35-turbo'
+        })
+        this.gpt4Azure = new ChatOpenAI({
+            ...azureBaseParam,
+            azureOpenAIApiDeploymentName: 'gpt-4'
         })
     }
 
@@ -130,6 +185,38 @@ export class GameService implements OnModuleInit {
         return await paginate<Charactor>(this.charactorRepository, req.page, req.search)
     }
 
+    async callLLM(messages: BaseMessageLike[], options?, callbacks?) {
+        const { value: model } = await this.sysConfigService.findByName('model')
+        let llm: ChatOpenAI
+        switch (model) {
+            case 'gpt-3.5':
+                llm = this.gpt35
+                break
+            case 'gpt-4':
+                llm = this.gpt4
+                break
+            case 'cf-gpt-3.5':
+                llm = this.gpt35CF
+                break
+            case 'cf-gpt-4':
+                llm = this.gpt4CF
+                break
+            case 'azure-gpt-3.5':
+                llm = this.gpt35Azure
+                break
+            case 'azure-gpt-4':
+                llm = this.gpt4Azure
+                break
+            default:
+                llm = this.llm
+        }
+        return await llm.call(messages, options, callbacks)
+    }
+
+    async emitEvent(id: string, event: any) {
+        this.eventsGateway.emitEvent(id, event)
+    }
+
     async genCharactor(background: string, num: number) {
         const parser = StructuredOutputParser.fromZodSchema(
             z.array(
@@ -153,7 +240,7 @@ export class GameService implements OnModuleInit {
             num,
             background
         })
-        const response = await this.llm.call([new HumanMessage(input)])
+        const response = await this.callLLM([new HumanMessage(input)])
         const output = await parser.parse(response.content)
         return output
     }
@@ -259,7 +346,7 @@ export class GameService implements OnModuleInit {
                 charactors: this.formatCharactors(gameState.charactors),
                 datetime: this.formatDatetime(gameState.date, gameState.time)
             })
-            const response = await this.llm.call([new HumanMessage(input)])
+            const response = await this.callLLM([new HumanMessage(input)])
             gameState.plot = response.content
         }
 
@@ -345,7 +432,7 @@ export class GameService implements OnModuleInit {
                 : ''
         })
 
-        const response = await this.llm.call([new HumanMessage(input)])
+        const response = await this.callLLM([new HumanMessage(input)])
         plot = response.content
 
         subject.next({
@@ -437,7 +524,7 @@ export class GameService implements OnModuleInit {
             summary: summary
         })
 
-        const response = await this.llm.call([new HumanMessage(input)])
+        const response = await this.callLLM([new HumanMessage(input)])
         plot = response.content
 
         subject.next({
@@ -518,7 +605,7 @@ export class GameService implements OnModuleInit {
             while (!stop) {
                 const history: GameState[] = await this.getHistory(game.currentState, [])
                 const lastState = history[history.length - 1]
-                if (lastState.options && lastState.options.length > 0) {
+                if (!choice && lastState.options && lastState.options.length > 0) {
                     choice = lastState.options[Math.floor(Math.random() * lastState.options.length)].content
                 }
                 const subject = new Subject<GameEvent>()
@@ -526,17 +613,13 @@ export class GameService implements OnModuleInit {
                 const self = this
                 subject.subscribe({
                     async next(evt) {
+                        self.emitEvent(`${id}`, evt)
                         switch (evt.type) {
                             case 'plot':
                                 self.logger.log('生成剧情')
                                 break
                             case 'options':
-                                self.makeVotes(
-                                    id,
-                                    evt.data.map((i) => i.content),
-                                    new Date(),
-                                    voteSubject
-                                )
+                                self.makeVotes(id, evt.data, new Date(), voteSubject)
                                 break
                             case 'newCharactor':
                                 break
@@ -562,6 +645,7 @@ export class GameService implements OnModuleInit {
                 }
                 this.logger.log('等待投票结束')
                 choice = await lastValueFrom(voteSubject)
+                this.logger.log(`选择: ${choice}`)
                 addCharactor = null
                 if (lastState.charactors.filter((i) => !i.dead).length < 8) {
                     const danmu = await this.danmuService.pickAsCharactor(id, lastState.createdAt)
@@ -581,23 +665,32 @@ export class GameService implements OnModuleInit {
         }
     }
 
-    async makeVotes(gameId: number, options: string[], startTime: Date, subject: Subject<string>) {
+    async makeVotes(gameId: number, options: any[], startTime: Date, subject: Subject<string>) {
         try {
             this.logger.log('发起投票')
             let s = differenceInSeconds(new Date(), startTime)
-            while (s < 60) {
-                const votes = await this.danmuService.collectVotes(gameId, startTime, options.length)
+            while (s < 30) {
                 await setTimeout(1000)
+                const votes = await this.danmuService.collectVotes(gameId, startTime, options.length)
+                this.emitEvent(`${gameId}`, { type: 'votes', data: votes })
                 this.logger.log(`${s} 投票进度${JSON.stringify(votes)}`)
                 s = differenceInSeconds(new Date(), startTime)
             }
             const votes = await this.danmuService.collectVotes(gameId, startTime, options.length)
+            this.emitEvent(`${gameId}`, { type: 'votes', data: votes })
             this.logger.log(`投票结束: ${JSON.stringify(votes)}`)
             const max = Math.max(...votes)
             const indices = votes.map((i, index) => (i === max ? index : -1)).filter((i) => i >= 0)
             const indexOfMaxValue = indices[Math.floor(Math.random() * indices.length)]
-            this.logger.log(`选择: ${indexOfMaxValue} - ${options[indexOfMaxValue]}`)
-            subject.next(options[indexOfMaxValue])
+            this.emitEvent(`${gameId}`, {
+                type: 'voteResult',
+                data: {
+                    index: indexOfMaxValue,
+                    option: options[indexOfMaxValue]
+                }
+            })
+            this.logger.log(`选择: ${indexOfMaxValue} - ${JSON.stringify(options[indexOfMaxValue])}`)
+            subject.next(options[indexOfMaxValue].content)
         } catch (error) {
             this.logger.error(error)
             subject.error(error)
@@ -631,8 +724,8 @@ export class GameService implements OnModuleInit {
                 const input = await prompt.format({
                     text
                 })
-                const response = await this.llm.call([new HumanMessage(input)])
-                const output = await parser.parse(response.content)
+                const response = await this.callLLM([new HumanMessage(input)])
+                const output = await parser.parse(response.content.replace('+', ''))
                 return output.map((i) => {
                     i.modifyHp = i.modifyHp.filter((j) => charactors.find((k) => k.name === j.name))
                     return i
@@ -686,7 +779,7 @@ export class GameService implements OnModuleInit {
                     .join('\n\n')
             })
         }
-        const response = await this.llm.call([new HumanMessage(input)])
+        const response = await this.callLLM([new HumanMessage(input)])
 
         return response.content
     }
@@ -719,7 +812,7 @@ export class GameService implements OnModuleInit {
                     base,
                     background: game.background
                 })
-                const response = await this.llm.call([
+                const response = await this.callLLM([
                     new SystemMessage('你是一个富有想象力的写作助手,你的任务是帮我想象一个小说里的角色。'),
                     new HumanMessage(input)
                 ])
@@ -757,7 +850,7 @@ export class GameService implements OnModuleInit {
                     summary,
                     text: newPlot
                 })
-                const response = await this.llm.call([
+                const response = await this.callLLM([
                     new SystemMessage('你来扮演一个文字冒险游戏,你现在的任务是根据当前剧情随机调整角色的HP值'),
                     new HumanMessage(input)
                 ])

+ 2 - 0
src/room/room.controller.ts

@@ -2,6 +2,7 @@ import { Body, Controller, Delete, Get, Param, Post, Put } from '@nestjs/common'
 import { PageRequest } from 'src/common/dto/page-request'
 import { RoomService } from './room.service'
 import { Room } from './entities/room.entity'
+import { Public } from '../auth/public.decorator'
 
 @Controller('room')
 export class RoomController {
@@ -17,6 +18,7 @@ export class RoomController {
     }
 
     @Get('/:id')
+    @Public()
     public async get(@Param('id') id: string) {
         return await this.roomService.findById(Number(id))
     }