xiongzhu 2 vuotta sitten
vanhempi
commit
c89338f1e2

+ 2 - 1
.env

@@ -51,6 +51,7 @@ ALIYUN_SMS_TEMPLATE_CODE=SMS_175485688
 # AZURE_OPENAI_KEY=beb32e4625a94b65ba8bc0ba1688c4d2
 # AZURE_OPENAI_ENDPOINT=https://zouma.openai.azure.com
 AZURE_OPENAI_KEY=62dd8a1466524c64967810c692f0197e
+AZURE_OPENAI_INSTANCE=zouma1
 AZURE_OPENAI_ENDPOINT=https://zouma1.openai.azure.com
 AZURE_OPENAI_DEPLOYMENT=gpt-35-turbo
 AZURE_OPENAI_VERSION=2023-03-15-preview
@@ -58,7 +59,7 @@ AZURE_OPENAI_VERSION=2023-03-15-preview
 AZURE_EMBEDDING_KEY=beb32e4625a94b65ba8bc0ba1688c4d2
 AZURE_EMBEDDING_INSTANCE=zouma
 AZURE_EMBEDDING_DEPLOYMENT=embedding
-AZURE_OPENAI_VERSION=2023-03-15-preview
+AZURE_EMBEDDING_VERSION=2023-03-15-preview
 
 OPENAI_API_KEY=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
 

+ 10 - 26
langChain.mjs

@@ -17,31 +17,15 @@ const loader1 = new UnstructuredLoader('/Users/drew/Downloads/客服的副本.pd
     apiUrl: 'http://192.168.6.19:8000/general/v0/general'
 })
 const docs1 = await loader1.load()
-console.log(docs1)
+console.log(docs1.length)
+import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'
 
-const embeddings = new OpenAIEmbeddings({
-    azureOpenAIApiKey: 'beb32e4625a94b65ba8bc0ba1688c4d2',
-    azureOpenAIApiInstanceName: 'zouma',
-    azureOpenAIApiDeploymentName: 'embedding',
-    azureOpenAIApiVersion: '2023-03-15-preview',
-    verbose: true
+const text = `Hi.\n\nI'm Harrison.\n\nHow? Are? You?\nOkay then f f f f.
+This is a weird text to write, but gotta test the splittingggg some how.\n\n
+Bye!\n\n-H.`
+const splitter = new RecursiveCharacterTextSplitter({
+    chunkSize: 200,
+    chunkOverlap: 1
 })
-
-const typeormVectorStore = await TypeORMVectorStore.fromDataSource(embeddings, {
-    postgresConnectionOptions: {
-        type: 'postgres',
-        host: process.env.PG_HOST,
-        port: process.env.PG_PORT,
-        username: process.env.PG_USERNAME,
-        password: process.env.PG_PASSWORD,
-        database: process.env.PG_DATABASE
-    },
-    verbose: true
-})
-
-await typeormVectorStore.ensureTableInDatabase()
-
-await typeormVectorStore.addDocuments(docs1)
-
-const results = await typeormVectorStore.similaritySearch('包邮', 2)
-console.log(results)
+const docOutput = await splitter.splitDocuments(docs1)
+console.log(docOutput.length)

+ 1 - 0
package.json

@@ -63,6 +63,7 @@
     "fastq": "^1.15.0",
     "handlebars": "^4.7.7",
     "hbs": "^4.2.0",
+    "hnswlib-node": "^2.0.0",
     "ioredis": "^5.3.2",
     "isomorphic-fetch": "^3.0.0",
     "keyv": "^4.5.2",

+ 45 - 0
src/knowledge-base/embedding.ts

@@ -0,0 +1,45 @@
+import { Logger } from '@nestjs/common'
+import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
+import * as queue from 'fastq'
+import { setTimeout } from 'timers/promises'
+
+const chunkArray = (arr, chunkSize) =>
+    arr.reduce((chunks, elem, index) => {
+        const chunkIndex = Math.floor(index / chunkSize)
+        const chunk = chunks[chunkIndex] || []
+        // eslint-disable-next-line no-param-reassign
+        chunks[chunkIndex] = chunk.concat([elem])
+        return chunks
+    }, [])
+
+export class OpenAIParallelEmbeddings extends OpenAIEmbeddings {
+    override async embedDocuments(texts: string[]) {
+        const subPrompts = chunkArray(
+            this.stripNewLines ? texts.map((t) => t.replace(/\n/g, ' ')) : texts,
+            this.batchSize
+        )
+        const embeddings = []
+        const self = this
+        async function worker({ i, input }) {
+            try {
+                const { data } = await self['embeddingWithRetry']({
+                    model: self.modelName,
+                    input
+                })
+                // a batched request returns one embedding PER input in data.data;
+                // taking only data.data[0] would drop every other text in the batch
+                embeddings[i] = data.data.map((d) => d.embedding)
+                Logger.log(`create embedding for ${i + 1}/${subPrompts.length}`)
+            } catch (error) {
+                // NOTE(review): a failed batch leaves a hole at index i, so the
+                // result length will not match texts.length — consider rethrowing
+                Logger.error(error)
+            }
+        }
+        const q = queue.promise(worker, 8)
+        subPrompts.forEach((item, index) => {
+            q.push({
+                input: item,
+                i: index
+            })
+        })
+        await q.drained()
+        // flatten per-batch arrays back into one embedding per input text
+        return embeddings.flat()
+    }
+}

+ 11 - 0
src/knowledge-base/entities/documents.entity.ts

@@ -0,0 +1,11 @@
+import { Model } from 'sequelize'
+
+export class Documents extends Model {
+    id: string
+
+    pageContent: string
+
+    metadata: object
+
+    embedding: string
+}

+ 192 - 129
src/knowledge-base/knowledge-base.service.ts

@@ -14,6 +14,7 @@ import { Configuration, OpenAIApi } from 'azure-openai'
 import { DataTypes, Sequelize } from 'sequelize'
 import { ConfigService } from '@nestjs/config'
 import { KnowledgeEmbedding } from './entities/knowledge-embedding.entity'
+import { Documents } from './entities/documents.entity'
 import { VECTOR } from '../utils/pgvector'
 import * as queue from 'fastq'
 import { setTimeout } from 'timers/promises'
@@ -27,10 +28,18 @@ import { FileStatus } from './enums/file-status.enum'
 import xlsx from 'node-xlsx'
 import * as mime from 'mime'
 import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
+import { ChatOpenAI } from 'langchain/chat_models/openai'
+import { HumanMessage } from 'langchain/schema'
 import { TypeORMVectorStore } from 'langchain/vectorstores/typeorm'
 import { UnstructuredLoader } from 'langchain/document_loaders/fs/unstructured'
 import { mkdtempSync, unlinkSync, rmdirSync, writeFileSync, rmSync } from 'fs'
 import { join } from 'path'
+import { OpenAIParallelEmbeddings } from './embedding'
+import { ConversationalRetrievalQAChain } from 'langchain/chains'
+import { HNSWLib } from 'langchain/vectorstores/hnswlib'
+import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'
+import { BufferMemory } from 'langchain/memory'
+import { PromptTemplate } from 'langchain/prompts'
 function formatEmbedding(embedding: number[]) {
     return `[${embedding.join(', ')}]`
 }
@@ -38,10 +47,9 @@ function formatEmbedding(embedding: number[]) {
 @Injectable()
 export class KnowledgeBaseService implements OnModuleInit {
     private readonly tokenizer: Tiktoken
-    private readonly openai: OpenAIApi
-    private readonly embeddingApi: OpenAIApi
+    private embeddingModel: OpenAIParallelEmbeddings
+    private chatModel: ChatOpenAI
     private readonly sequelize: Sequelize
-    private embeddings: OpenAIEmbeddings
     private vectorStore: TypeORMVectorStore
     constructor(
         @InjectRepository(KnowledgeBase)
@@ -52,28 +60,6 @@ export class KnowledgeBaseService implements OnModuleInit {
         private readonly fileService: FileService
     ) {
         this.tokenizer = get_encoding('cl100k_base')
-        this.openai = new OpenAIApi(
-            new Configuration({
-                apiKey: process.env.AZURE_OPENAI_KEY,
-                // add azure info into configuration
-                azure: {
-                    apiKey: process.env.AZURE_OPENAI_KEY,
-                    endpoint: process.env.AZURE_OPENAI_ENDPOINT,
-                    deploymentName: process.env.AZURE_OPENAI_DEPLOYMENT
-                }
-            })
-        )
-        this.embeddingApi = new OpenAIApi(
-            new Configuration({
-                apiKey: process.env.AZURE_EMBEDDING_KEY,
-                // add azure info into configuration
-                azure: {
-                    apiKey: process.env.AZURE_EMBEDDING_KEY,
-                    endpoint: `https://${process.env.AZURE_EMBEDDING_INSTANCE}.openai.azure.com`,
-                    deploymentName: process.env.AZURE_EMBEDDING_DEPLOYMENT
-                }
-            })
-        )
         this.sequelize = new Sequelize({
             dialect: 'postgres',
             host: configService.get<string>('PG_HOST'),
@@ -84,6 +70,9 @@ export class KnowledgeBaseService implements OnModuleInit {
             // logging: (msg) => Logger.debug(msg, 'Sequelize')
             logging: false
         })
+    }
+
+    async onModuleInit() {
         KnowledgeEmbedding.init(
             {
                 id: {
@@ -117,19 +106,46 @@ export class KnowledgeBaseService implements OnModuleInit {
             },
             { sequelize: this.sequelize }
         )
+        Documents.init(
+            {
+                id: {
+                    primaryKey: true,
+                    // DataTypes.UUIDV4 is a default-value generator, not a column
+                    // type; and a plain string default would store the literal text
+                    // 'uuid_generate_v4()' instead of invoking the PG function
+                    type: DataTypes.UUID,
+                    defaultValue: Sequelize.literal('uuid_generate_v4()')
+                },
+                pageContent: {
+                    type: DataTypes.TEXT
+                },
+                metadata: {
+                    type: DataTypes.JSONB
+                },
+                embedding: {
+                    type: new VECTOR(1536)
+                }
+            },
+            { sequelize: this.sequelize, tableName: 'documents' }
+        )
         this.sequelize.sync()
-    }
 
-    async onModuleInit() {
-        this.embeddings = new OpenAIEmbeddings({
+        this.embeddingModel = new OpenAIParallelEmbeddings({
             azureOpenAIApiKey: process.env.AZURE_EMBEDDING_KEY,
             azureOpenAIApiInstanceName: process.env.AZURE_EMBEDDING_INSTANCE,
             azureOpenAIApiDeploymentName: process.env.AZURE_EMBEDDING_DEPLOYMENT,
+            azureOpenAIApiVersion: process.env.AZURE_EMBEDDING_VERSION,
+            azureOpenAIApiEmbeddingsDeploymentName: process.env.AZURE_EMBEDDING_DEPLOYMENT,
+            maxRetries: 5,
+            timeout: 15000
+        })
+        this.chatModel = new ChatOpenAI({
+            azureOpenAIApiKey: process.env.AZURE_OPENAI_KEY,
+            azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_INSTANCE,
+            azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_DEPLOYMENT,
             azureOpenAIApiVersion: process.env.AZURE_OPENAI_VERSION,
-            verbose: true,
-            maxConcurrency: 8
+            maxRetries: 5,
+            timeout: 15000,
+            temperature: 0.1
         })
-        this.vectorStore = await TypeORMVectorStore.fromDataSource(this.embeddings, {
+        this.vectorStore = await TypeORMVectorStore.fromDataSource(this.embeddingModel, {
             postgresConnectionOptions: {
                 type: 'postgres',
                 host: process.env.PG_HOST,
@@ -163,6 +179,13 @@ export class KnowledgeBaseService implements OnModuleInit {
                 knowledgeId: knowledgeBaseId
             }
         })
+        await Documents.destroy({
+            where: {
+                metadata: {
+                    knowledgeId: knowledgeBaseId
+                }
+            }
+        })
     }
 
     async getKnowledgeBaseById(knowledgeBaseId: number): Promise<KnowledgeBase> {
@@ -184,6 +207,13 @@ export class KnowledgeBaseService implements OnModuleInit {
                 fileId: knowledgeFileId
             }
         })
+        await Documents.destroy({
+            where: {
+                metadata: {
+                    fileId: knowledgeFileId
+                }
+            }
+        })
     }
 
     public async uploadKnowledgeFile(file: Express.Multer.File, knowledgeId: number) {
@@ -216,76 +246,77 @@ export class KnowledgeBaseService implements OnModuleInit {
                 this.processExcelKnowledgeFile(knowledgeFile, buffer)
                 break
             case 'application/pdf':
-                this.processPdfKnowledgeFile(knowledgeFile, buffer)
-                // this.processPdfKnowledgeFile1(knowledgeFile, buffer)
+                // this.processPdfKnowledgeFile(knowledgeFile, buffer)
+                this.processFile(knowledgeFile, buffer)
                 break
         }
         return knowledgeFile
     }
 
-    public async processPdfKnowledgeFile(knowledgeFile: KnowledgeFile, buffer: Buffer) {
-        knowledgeFile.status = FileStatus.PROCESSING
-        try {
-            await this.knowledgeFileRepository.save(knowledgeFile)
-            const pdf = await PdfParse(buffer)
-            const contents = []
-            let paragraph = ''
-            pdf.text
-                .trim()
-                .split('\n')
-                .forEach((line) => {
-                    line = line.trim()
-                    paragraph += line
-                    if (this.isFullSentence(line)) {
-                        contents.push(paragraph)
-                        paragraph = ''
-                    }
-                })
-            if (paragraph) {
-                contents.push(paragraph)
-            }
+    // public async processPdfKnowledgeFile(knowledgeFile: KnowledgeFile, buffer: Buffer) {
+    //     knowledgeFile.status = FileStatus.PROCESSING
+    //     try {
+    //         await this.knowledgeFileRepository.save(knowledgeFile)
+    //         const pdf = await PdfParse(buffer)
+    //         const contents = []
+    //         let paragraph = ''
+    //         pdf.text
+    //             .trim()
+    //             .split('\n')
+    //             .forEach((line) => {
+    //                 line = line.trim()
+    //                 paragraph += line
+    //                 if (this.isFullSentence(line)) {
+    //                     contents.push(paragraph)
+    //                     paragraph = ''
+    //                 }
+    //             })
+    //         if (paragraph) {
+    //             contents.push(paragraph)
+    //         }
 
-            const embeddings = await this.createEmbeddings(
-                contents.map((i) => {
-                    return { text: i }
-                })
-            )
-            Logger.log(
-                `create embeddings finished, total token usage: ${embeddings.reduce((acc, cur) => acc + cur.token, 0)}`
-            )
-            await KnowledgeEmbedding.destroy({
-                where: {
-                    fileHash: knowledgeFile.fileHash
-                }
-            })
-            let i = 0
-            for (const item of embeddings) {
-                try {
-                    await KnowledgeEmbedding.create({
-                        orgId: knowledgeFile.orgId,
-                        knowledgeId: knowledgeFile.knowledgeId,
-                        fileId: knowledgeFile.id,
-                        fileHash: knowledgeFile.fileHash,
-                        text: item.text,
-                        embedding: formatEmbedding(item.embedding),
-                        index: i++
-                    })
-                } catch (error) {
-                    Logger.error(error.message)
-                }
-            }
-            knowledgeFile.status = FileStatus.DONE
-            await this.knowledgeFileRepository.save(knowledgeFile)
-        } catch (e) {
-            knowledgeFile.status = FileStatus.FAILED
-            knowledgeFile.error = e.message
-            await this.knowledgeFileRepository.save(knowledgeFile)
-        }
-    }
+    //         const embeddings = await this.createEmbeddings(
+    //             contents.map((i) => {
+    //                 return { text: i }
+    //             })
+    //         )
+    //         Logger.log(
+    //             `create embeddings finished, total token usage: ${embeddings.reduce((acc, cur) => acc + cur.token, 0)}`
+    //         )
+    //         await KnowledgeEmbedding.destroy({
+    //             where: {
+    //                 fileHash: knowledgeFile.fileHash
+    //             }
+    //         })
+    //         let i = 0
+    //         for (const item of embeddings) {
+    //             try {
+    //                 await KnowledgeEmbedding.create({
+    //                     orgId: knowledgeFile.orgId,
+    //                     knowledgeId: knowledgeFile.knowledgeId,
+    //                     fileId: knowledgeFile.id,
+    //                     fileHash: knowledgeFile.fileHash,
+    //                     text: item.text,
+    //                     embedding: formatEmbedding(item.embedding),
+    //                     index: i++
+    //                 })
+    //             } catch (error) {
+    //                 Logger.error(error.message)
+    //             }
+    //         }
+    //         knowledgeFile.status = FileStatus.DONE
+    //         await this.knowledgeFileRepository.save(knowledgeFile)
+    //     } catch (e) {
+    //         knowledgeFile.status = FileStatus.FAILED
+    //         knowledgeFile.error = e.message
+    //         await this.knowledgeFileRepository.save(knowledgeFile)
+    //     }
+    // }
 
-    public async processPdfKnowledgeFile1(knowledgeFile: KnowledgeFile, buffer: Buffer) {
+    public async processFile(knowledgeFile: KnowledgeFile, buffer: Buffer) {
         knowledgeFile.status = FileStatus.PROCESSING
         try {
+            await this.knowledgeFileRepository.save(knowledgeFile)
             const tmpDir = mkdtempSync('file')
             const tmpFile = join(tmpDir, knowledgeFile.fileName)
             writeFileSync(tmpFile, buffer)
@@ -294,14 +325,29 @@ export class KnowledgeBaseService implements OnModuleInit {
                 strategy: 'fast'
             })
             const docs = await loader.load()
-            rmSync(tmpDir, { recursive: true })
-            docs.forEach((doc) => {
-                doc.metadata.orgId = knowledgeFile.orgId
-                doc.metadata.knowledgeId = knowledgeFile.knowledgeId
-                doc.metadata.fileId = knowledgeFile.id
-                doc.metadata.fileHash = knowledgeFile.fileHash
+            const splitter = new RecursiveCharacterTextSplitter({
+                chunkSize: 300,
+                chunkOverlap: 100
             })
-            await this.vectorStore.addDocuments(docs)
+            let output: any[] = await splitter.createDocuments(
+                [docs.map((i) => i.pageContent).join('\n')],
+                [
+                    {
+                        orgId: knowledgeFile.orgId,
+                        knowledgeId: knowledgeFile.knowledgeId,
+                        fileId: knowledgeFile.id,
+                        fileHash: knowledgeFile.fileHash,
+                        fileName: knowledgeFile.fileName
+                    }
+                ],
+                {
+                    chunkHeader: `DOCUMENT NAME: [${knowledgeFile.fileName}](${knowledgeFile.fileUrl})\n\n---\n\n`,
+                    appendChunkOverlapHeader: true
+                }
+            )
+            Logger.log(`load ${output.length} docs`)
+            rmSync(tmpDir, { recursive: true })
+            await this.vectorStore.addDocuments(output)
             knowledgeFile.status = FileStatus.DONE
             await this.knowledgeFileRepository.save(knowledgeFile)
         } catch (e) {
@@ -395,47 +441,28 @@ export class KnowledgeBaseService implements OnModuleInit {
         return result.filter((i) => i && i.embedding)
     }
 
-    async getEmbedding(content: string, retry = 0) {
+    async getEmbedding(content: string) {
         try {
-            const response = await this.embeddingApi.createEmbedding({
-                model: 'embedding',
-                input: content
-            })
+            const response = await this.embeddingModel.embedQuery(content)
             return {
                 text: content,
-                embedding: response.data.data[0].embedding,
-                token: response.data.usage.total_tokens
+                embedding: response,
+                token: this.tokenizer.encode(content).length
             }
         } catch (error) {
-            if (retry < 3) {
-                Logger.error(`fetchEmbedding error: ${error.message}, retry ${retry}`, 'fetchEmbedding')
-                await setTimeout(2000)
-                return await this.getEmbedding(content, retry + 1)
-            }
             Logger.error(error.stack, 'fetchEmbedding')
             throw new InternalServerErrorException(error.message)
         }
     }
 
     async getKeywords(text: string) {
-        try {
-            const res = await this.openai.createChatCompletion({
-                model: 'gpt-35-turbo',
-                messages: [
-                    {
-                        role: 'user',
-                        content: `You need to extract keywords from the statement or question and return a series of keywords separated by commas.\ncontent: ${text}\nkeywords: `
-                    }
-                ]
-            })
-            return res.data.choices[0].message.content
-        } catch (error) {
-            Logger.error(error.message)
-            if (error.response) {
-                Logger.error(error.response.data)
-            }
-            throw new InternalServerErrorException(error.message)
-        }
+        return (
+            await this.chatModel.call([
+                new HumanMessage(
+                    `You need to extract keywords from the statement or question and return a series of keywords separated by commas.\ncontent: ${text}\nkeywords: `
+                )
+            ])
+        ).content
     }
 
     cutContext(context: string[]) {
@@ -474,4 +501,40 @@ export class KnowledgeBaseService implements OnModuleInit {
         )
         return context
     }
+
+    async askKnowledge1(options: { question: string; orgId: number; knowledgeId?: number; fileId?: number }) {
+        /* Create the chain */
+        const chain = ConversationalRetrievalQAChain.fromLLM(
+            this.chatModel,
+            this.vectorStore.asRetriever(100, {
+                orgId: options.orgId
+            }),
+            {
+                memory: new BufferMemory({
+                    memoryKey: 'chat_history' // Must be set to "chat_history"
+                }),
+                qaChainOptions: {
+                    // the default "stuff" QA chain fills a single prompt with both
+                    // {context} and {question}; without {question} the user's question
+                    // never reaches the model ("questionPrompt" is not a stuff-chain option)
+                    prompt: PromptTemplate.fromTemplate(`(以下内容是相关的本地知识库信息,按照相关程度从高到低排序,第一行是文件名和链接)
+\`\`\`
+{context}
+\`\`\`
+请仔细思考,优先使用以上提供的内容进行回答,并告诉我相关的文件和链接。如果无法从内容中找到答案,你需要根据给你自己掌握的知识回答问题。
+请确保使用中文进行回答。
+请在末尾附上相关的文件链接。
+
+问题: {question}`)
+                },
+                questionGeneratorChainOptions: {
+                    template: `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
+
+Chat History:
+{chat_history}
+Follow Up Input: {question}
+Standalone question:`
+                }
+            }
+        )
+        /* Ask it a question and return the chain's answer to the caller */
+        const res = await chain.call({ question: options.question })
+        return res
+    }
 }

+ 28 - 50
yarn.lock

@@ -150,12 +150,6 @@
     ora "5.4.1"
     rxjs "6.6.7"
 
-<<<<<<< HEAD
-"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.7", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.21.4":
-  version "7.21.4"
-  resolved "https://registry.npmmirror.com/@babel/code-frame/-/code-frame-7.21.4.tgz"
-  integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==
-=======
 "@anthropic-ai/sdk@^0.5.7":
   version "0.5.8"
   resolved "https://registry.npmmirror.com/@anthropic-ai/sdk/-/sdk-0.5.8.tgz#cc363c9a22510cd7f5e470e68e94b843d688e3c1"
@@ -174,7 +168,6 @@
   version "7.18.6"
   resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz"
   integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
   dependencies:
     "@babel/highlight" "^7.18.6"
 
@@ -2401,6 +2394,13 @@ binary-search@^1.3.5:
   resolved "https://registry.npmmirror.com/binary-search/-/binary-search-1.3.6.tgz#e32426016a0c5092f0f3598836a1c7da3560565c"
   integrity sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==
 
+bindings@^1.5.0:
+  version "1.5.0"
+  resolved "https://registry.npmmirror.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df"
+  integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==
+  dependencies:
+    file-uri-to-path "1.0.0"
+
 bl@^4.1.0:
   version "4.1.0"
   resolved "https://registry.npmmirror.com/bl/-/bl-4.1.0.tgz"
@@ -2573,14 +2573,6 @@ camelcase@^5.3.1:
   resolved "https://registry.npmmirror.com/camelcase/-/camelcase-5.3.1.tgz"
   integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
 
-<<<<<<< HEAD
-camelcase@^6.2.0:
-  version "6.3.0"
-  resolved "https://registry.npmmirror.com/camelcase/-/camelcase-6.3.0.tgz"
-  integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
-
-=======
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
 caniuse-lite@^1.0.30001449:
   version "1.0.30001481"
   resolved "https://registry.npmmirror.com/caniuse-lite/-/caniuse-lite-1.0.30001481.tgz"
@@ -2802,8 +2794,6 @@ combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6:
   dependencies:
     delayed-stream "~1.0.0"
 
-<<<<<<< HEAD
-=======
 commander@4.1.1:
   version "4.1.1"
   resolved "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz"
@@ -2814,7 +2804,6 @@ commander@^10.0.1:
   resolved "https://registry.npmmirror.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06"
   integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==
 
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
 commander@^2.20.0:
   version "2.20.3"
   resolved "https://registry.npmmirror.com/commander/-/commander-2.20.3.tgz"
@@ -3008,25 +2997,7 @@ debug@^2.2.0:
   dependencies:
     ms "2.0.0"
 
-<<<<<<< HEAD
-debug@^2.6.9:
-  version "2.6.9"
-  resolved "https://registry.npmmirror.com/debug/-/debug-2.6.9.tgz"
-  integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
-  dependencies:
-    ms "2.0.0"
-
-debug@^3.1.0:
-  version "3.2.7"
-  resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz"
-  integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==
-  dependencies:
-    ms "^2.1.1"
-
-debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@4, debug@4.x:
-=======
 debug@4, debug@4.x, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4:
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
   version "4.3.4"
   resolved "https://registry.npmmirror.com/debug/-/debug-4.3.4.tgz"
   integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@@ -3674,6 +3645,11 @@ file-entry-cache@^6.0.1:
   dependencies:
     flat-cache "^3.0.4"
 
+file-uri-to-path@1.0.0:
+  version "1.0.0"
+  resolved "https://registry.npmmirror.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
+  integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==
+
 file-uri-to-path@2:
   version "2.0.0"
   resolved "https://registry.npmmirror.com/file-uri-to-path/-/file-uri-to-path-2.0.0.tgz"
@@ -4217,6 +4193,14 @@ highlight.js@^10.7.1:
   resolved "https://registry.npmmirror.com/highlight.js/-/highlight.js-10.7.3.tgz"
   integrity sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==
 
+hnswlib-node@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmmirror.com/hnswlib-node/-/hnswlib-node-2.0.0.tgz#d62ef51694a3f09caa6e9c8ffbfd1124fa45b1da"
+  integrity sha512-ib5c5m5JSW0w/8CfufAozrgXSGnWizH62YZkd2JvGVCi2OE45jQZmtQzh5aXszQiJTr8+6bMxnN/Qo2w4e+Z4A==
+  dependencies:
+    bindings "^1.5.0"
+    node-addon-api "^7.0.0"
+
 hpagent@^1.2.0:
   version "1.2.0"
   resolved "https://registry.npmmirror.com/hpagent/-/hpagent-1.2.0.tgz#0ae417895430eb3770c03443456b8d90ca464903"
@@ -5158,16 +5142,12 @@ jsonfile@^6.0.1:
   optionalDependencies:
     graceful-fs "^4.1.6"
 
-<<<<<<< HEAD
-jsonwebtoken@^9.0.0, jsonwebtoken@9.0.0:
-=======
 jsonpointer@^5.0.1:
   version "5.0.1"
   resolved "https://registry.npmmirror.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559"
   integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==
 
 jsonwebtoken@9.0.0, jsonwebtoken@^9.0.0:
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
   version "9.0.0"
   resolved "https://registry.npmmirror.com/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz"
   integrity sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==
@@ -5512,8 +5492,6 @@ mime@1.6.0:
   resolved "https://registry.npmmirror.com/mime/-/mime-1.6.0.tgz"
   integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
 
-<<<<<<< HEAD
-=======
 mime@2.6.0, mime@^2.4.5, mime@^2.5.2:
   version "2.6.0"
   resolved "https://registry.npmmirror.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367"
@@ -5524,7 +5502,6 @@ mime@^3.0.0:
   resolved "https://registry.npmmirror.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7"
   integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==
 
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
 mimic-fn@^2.1.0:
   version "2.1.0"
   resolved "https://registry.npmmirror.com/mimic-fn/-/mimic-fn-2.1.0.tgz"
@@ -5713,17 +5690,10 @@ mquery@5.0.0:
   dependencies:
     debug "4.x"
 
-<<<<<<< HEAD
-ms@*, ms@^2.0.0, ms@^2.1.1, ms@2.1.2:
-  version "2.1.2"
-  resolved "https://registry.npmmirror.com/ms/-/ms-2.1.2.tgz"
-  integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
-=======
 ms@*, ms@2.1.3, ms@^2.0.0, ms@^2.1.3:
   version "2.1.3"
   resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
->>>>>>> 9c70b579dd00d79c345cb91552da11e0ea667f25
 
 ms@2.0.0:
   version "2.0.0"
@@ -5844,6 +5814,7 @@ node-addon-api@*, node-addon-api@^5.0.0:
   resolved "https://registry.npmmirror.com/node-addon-api/-/node-addon-api-5.1.0.tgz"
   integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==
 
 <<<<<<< HEAD
 node-cron@^3.0.2:
   version "3.0.2"
@@ -5852,6 +5823,13 @@ node-cron@^3.0.2:
   dependencies:
     uuid "8.3.2"
 =======
+node-addon-api@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.npmmirror.com/node-addon-api/-/node-addon-api-7.0.0.tgz#8136add2f510997b3b94814f4af1cce0b0e3962e"
+  integrity sha512-vgbBJTS4m5/KkE16t5Ly0WW9hz46swAstv0hYYwMtbG7AznRhNyfLRe8HZAiWIpcHzoO7HxhLuBQj9rJ/Ho0ZA==
+
 node-domexception@1.0.0:
   version "1.0.0"
   resolved "https://registry.npmmirror.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"