@@ -14,10 +14,12 @@ import * as dedent from 'dedent'
 import { chatReplyProcess } from '../chat/chatgpt'
 import { fetchSSE } from '../chatapi/fetch-sse'
 import { v4 as uuidv4 } from 'uuid'
+import Redis from 'ioredis'

 @Injectable()
 export class OrgService {
   private readonly openai: OpenAIApi
+  private readonly redis: Redis
   constructor(
     @InjectRepository(Org)
     private readonly orgRepository: Repository<Org>,
@@ -28,14 +30,16 @@ export class OrgService {
   ) {
     this.openai = new OpenAIApi(
       new Configuration({
-        apiKey: 'beb32e4625a94b65ba8bc0ba1688c4d2',
+        apiKey: process.env.AZURE_OPENAI_KEY,
         // add azure info into configuration
         azure: {
-          apiKey: 'beb32e4625a94b65ba8bc0ba1688c4d2',
-          endpoint: 'https://zouma.openai.azure.com'
+          apiKey: process.env.AZURE_OPENAI_KEY,
+          endpoint: process.env.AZURE_OPENAI_ENDPOINT,
+          deploymentName: 'gpt-35-turbo-16k'
         }
       })
     )
+    this.redis = new Redis(process.env.REDIS_URI)
   }

   async findByUrl(url: string): Promise<Org> {
@@ -105,7 +109,13 @@ export class OrgService {
     await this.userService.updateUser(orgUser.userId, { orgId: null })
   }

-  async buildMessages(question: string, orgId: number, knowledgeId?: number, fileId?: number): Promise<any[]> {
+  async buildMessages(
+    question: string,
+    orgId: number,
+    parentMessageId?: string,
+    knowledgeId?: number,
+    fileId?: number
+  ): Promise<any[]> {
     const org = await this.findById(orgId)
     const context = await this.knowledgeService.askKnowledge(question, orgId, knowledgeId, fileId)
     if (org.questionTemplate) {
@@ -118,6 +128,13 @@ export class OrgService {
         content: org.systemPrompt.replace('${context}', context.join('\n')).replace('${question}', question)
       })
     }
+    if (parentMessageId) {
+      let history = (await this.getChatHistory(parentMessageId)).map((i) => ({
+        role: i.role,
+        content: i.text
+      }))
+      messages.push(...history)
+    }
     if (!/\$\{context\}/.test(org.systemPrompt)) {
       messages.push({
         role: 'user',
@@ -148,13 +165,52 @@ export class OrgService {
     return messages
   }

-  async ask(question: string, orgId: number, knowledgeId?: number, fileId?: number) {
+  async getChatHistory(parentMessageId, history = []) {
+    let parent: any = await this.redis.get(parentMessageId)
+    if (!parent) {
+      return history
+    }
+    parent = JSON.parse(parent)
+    history.unshift(parent)
+    if (parent.parentMessageId) {
+      return await this.getChatHistory(parent.parentMessageId, history)
+    }
+    return history
+  }
+
+  async ask(question: string, orgId: number, parentMessageId?: string, knowledgeId?: number, fileId?: number) {
     try {
       const response = await this.openai.createChatCompletion({
-        model: 'gpt35',
-        messages: await this.buildMessages(question, orgId, knowledgeId, fileId)
+        model: 'gpt-35-turbo-16k',
+        messages: await this.buildMessages(question, orgId, parentMessageId, knowledgeId, fileId),
+        temperature: 0.1,
       })
-      return { answer: response.data.choices[0].message.content }
+      const id = uuidv4()
+      await this.redis.set(
+        id,
+        JSON.stringify({
+          role: 'user',
+          id,
+          parentMessageId,
+          text: question
+        })
+      )
+      await this.redis.set(
+        response.data.id,
+        JSON.stringify({
+          role: 'assistant',
+          id: response.data.id,
+          parentMessageId: id,
+          text: response.data.choices[0].message.content
+        })
+      )
+      return {
+        role: 'assistant',
+        id: response.data.id,
+        text: response.data.choices[0].message.content,
+        detail: response.data
+      }
+      // return { answer: response.data.choices[0].message.content }
     } catch (error) {
       Logger.error(error.message)
       if (error.response) {
@@ -164,7 +220,15 @@ export class OrgService {
     }
   }

-  async streamAsk(req, res, question: string, orgId: number, knowledgeId?: number, fileId?: number) {
+  async streamAsk(
+    req,
+    res,
+    question: string,
+    orgId: number,
+    parentMessageId?: string,
+    knowledgeId?: number,
+    fileId?: number
+  ) {
     res.setHeader('Content-type', 'application/octet-stream')
     try {
       try {
@@ -177,7 +241,7 @@ export class OrgService {
         }
         await fetchSSE(url, {
           body: JSON.stringify({
-            messages: await this.buildMessages(question, orgId, knowledgeId, fileId),
+            messages: await this.buildMessages(question, orgId, parentMessageId, knowledgeId, fileId),
             stream: true
           }),
           headers: {
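
// --- Illustrative usage sketch (not part of the diff above) -----------------
// The change persists each turn in Redis as { id, parentMessageId, role, text },
// and getChatHistory() walks the parentMessageId chain so buildMessages() can
// prepend earlier turns. This is a minimal sketch of one way a caller might
// chain the ids returned by ask(); the import path, org id and questions are
// assumptions made for illustration only.
import { OrgService } from './org.service'

async function demoConversation(orgService: OrgService) {
  // First turn: no parentMessageId, so no history is loaded from Redis.
  const first = await orgService.ask('What does the refund policy say?', 1)

  // ask() stored two Redis entries: the user question under a fresh uuid and
  // the assistant answer under response.data.id, pointing back at the question.
  // Passing the returned id as parentMessageId on the next turn lets
  // getChatHistory() rebuild the whole thread, oldest message first.
  const followUp = await orgService.ask('Summarise that in one sentence.', 1, first.id)

  console.log(followUp.text)
}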