@@ -1,5 +1,5 @@
 import { ForbiddenException, Injectable, InternalServerErrorException, Logger } from '@nestjs/common'
-import { Observable } from 'rxjs'
+import { Observable, interval } from 'rxjs'
 import { ChatGPTAPI, ChatMessage } from '../chatapi'
 import type { RequestProps } from './types'
 import { chatReplyProcess } from './chatgpt'
@@ -27,7 +27,7 @@ export class ChatService {
     private readonly membershipService: MembershipService,
     private readonly httpService: HttpService,
     private readonly sysConfigService: SysConfigService
-  ) {}
+  ) { }
 
   public chat(req, res): Observable<any> {
     res.setHeader('Content-Type', 'application/octet-stream')
@@ -50,7 +50,7 @@ export class ChatService {
           temperature,
           top_p
         })
-          .then(() => {})
+          .then(() => { })
           .catch((error) => {
             observer.error(error)
           })
@@ -128,44 +128,43 @@ export class ChatService {
     }
   }
 
-  async sendMessage() {
-    const message = '请你发一条此时此刻的动态文案';
+  public async sendMessage(prompt: string, message: string): Promise<string> {
     try {
-      const result = await chatReplyProcess({
-        message: message,
-        process: (chat: ChatMessage) => {},
-      });
-      const chatMessage = result.data as ChatMessage;
-      this.chatHistoryRepository.save(
-        new ChatHistory({
-          messageId: chatMessage.parentMessageId,
-          parentMessageId: null,
-          userId: 0,
-          message: message,
-          role: 'system',
-          token: chatMessage.detail.usage.prompt_tokens,
-          time: new Date(),
-        }),
-      );
-      this.chatHistoryRepository.save(
-        new ChatHistory({
-          messageId: chatMessage.id,
-          parentMessageId: chatMessage.parentMessageId,
-          userId: 0,
-          message: chatMessage.text,
-          role: 'assistant',
-          token: chatMessage.detail.usage.completion_tokens,
-          time: new Date(),
-        }),
-      );
-      Logger.log(`机器人回答:${chatMessage.text}`, 'SendMessage');
-    } catch (error) {
-      Logger.error(error, 'SendMessage');
-    }
-  }
-
-
+      const result = await chatReplyProcess({
+        message: prompt,
+        systemMessage: message,
+        process: (chat: ChatMessage) => { },
+      });
+      const chatMessage = result.data as ChatMessage;
+      this.chatHistoryRepository.save(
+        new ChatHistory({
+          messageId: chatMessage.parentMessageId,
+          parentMessageId: null,
+          userId: 0,
+          message: message,
+          role: 'system',
+          token: chatMessage.detail.usage.prompt_tokens,
+          time: new Date(),
+        }),
+      );
+      this.chatHistoryRepository.save(
+        new ChatHistory({
+          messageId: chatMessage.id,
+          parentMessageId: chatMessage.parentMessageId,
+          userId: 0,
+          message: chatMessage.text,
+          role: 'assistant',
+          token: chatMessage.detail.usage.completion_tokens,
+          time: new Date(),
+        }),
+      );
+
+      Logger.log(`Bot reply: ${chatMessage.text}`, 'SendMessage');
+      return chatMessage.text;
+    } catch (error) {
+      Logger.error(error, 'SendMessage');
+    }
+  }
 
   public async chatProxy(req) {
     const url = `${process.env.AZURE_OPENAI_ENDPOINT}/openai/deployments/${process.env.AZURE_OPENAI_DEPLOYMENT}/chat/completions?api-version=${process.env.AZURE_OPENAI_VERSION}`
@@ -203,8 +202,8 @@ export class ChatService {
         this.tiktokenAndSave(
           req.user.id,
           req.body.messages.map((message) => `${message.role}:\n${message.content}`).join('\n\n') +
-          '\n\nassistant:\n' +
-          text
+            '\n\nassistant:\n' +
+            text
         )
         return subscriber.complete()
       }
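For context, a minimal sketch of how the refactored `sendMessage(prompt, message)` and the newly imported rxjs `interval` might be wired together by a caller. The `AutoPostService` wrapper, the schedule, the prompt strings, and the `./chat.service` import path are illustrative assumptions, not part of this change.

```ts
import { Injectable, Logger } from '@nestjs/common'
import { interval } from 'rxjs'
import { ChatService } from './chat.service' // assumed path to the service changed above

@Injectable()
export class AutoPostService {
  constructor(private readonly chatService: ChatService) {
    // Hypothetical schedule: request a generated status update once per hour.
    interval(60 * 60 * 1000).subscribe(async () => {
      const text = await this.chatService.sendMessage(
        'Write a short status update for this moment', // user prompt (maps to `message` in chatReplyProcess)
        'You are a social media assistant',            // system message (maps to `systemMessage`)
      )
      Logger.log(text, 'AutoPostService')
    })
  }
}
```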