From 679d7142ebc9bd9040243b441e12611a03cc8f33 Mon Sep 17 00:00:00 2001 From: duanfuxiang Date: Wed, 19 Mar 2025 21:01:32 +0800 Subject: [PATCH] use web worker to run pglite --- esbuild.config.mjs | 7 + package.json | 1 + pnpm-lock.yaml | 292 +++++++++ src/components/chat-view/Chat.tsx | 2 +- src/components/chat-view/MarkdownWithIcon.tsx | 105 ++-- src/components/chat-view/ReactMarkdown.tsx | 18 +- src/core/llm/gemini.ts | 1 + src/core/prompts/sections/capabilities.ts | 11 +- src/core/prompts/tools/search-web.ts | 4 +- src/core/rag/rag-engine.ts | 54 +- src/database/database-manager.ts | 218 +++---- .../conversation/conversation-manager.ts | 4 - .../modules/template/template-manager.ts | 2 - src/database/modules/vector/vector-manager.ts | 575 ++++++++++-------- .../modules/vector/vector-repository.ts | 10 +- src/main.ts | 21 +- src/pgworker/index.ts | 19 + src/pgworker/pglite.worker.ts | 78 +++ src/pgworker/worker.d.ts | 4 + .../components/ProviderModelsPicker.tsx | 2 +- src/types/embedding.ts | 2 - src/types/llm/model.ts | 1 + src/utils/parse-infio-block.ts | 6 +- src/utils/prompt-generator.ts | 2 +- src/utils/web-search.ts | 7 - 25 files changed, 985 insertions(+), 461 deletions(-) create mode 100644 src/pgworker/index.ts create mode 100644 src/pgworker/pglite.worker.ts create mode 100644 src/pgworker/worker.d.ts diff --git a/esbuild.config.mjs b/esbuild.config.mjs index 092eaef..e215033 100644 --- a/esbuild.config.mjs +++ b/esbuild.config.mjs @@ -2,6 +2,7 @@ import path from 'path' import esbuild from 'esbuild' import process from 'process' import builtins from 'builtin-modules' +import inlineWorkerPlugin from "esbuild-plugin-inline-worker"; const banner = `/* THIS IS A GENERATED/BUNDLED FILE BY ESBUILD @@ -17,7 +18,13 @@ const context = await esbuild.context({ }, entryPoints: ['src/main.ts'], bundle: true, + plugins: [inlineWorkerPlugin({ + define: { + 'process': '{}', // 继承主配置 + }, + })], external: [ + 'fs', 'obsidian', 'electron', '@codemirror/autocomplete', diff --git a/package.json b/package.json index d2d9d34..6034982 100644 --- a/package.json +++ b/package.json @@ -68,6 +68,7 @@ "clsx": "^2.1.1", "diff": "^7.0.0", "drizzle-orm": "^0.35.2", + "esbuild-plugin-inline-worker": "^0.1.1", "exponential-backoff": "^3.1.1", "fuse.js": "^7.1.0", "fuzzysort": "^3.1.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a241f55..672af68 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -62,6 +62,9 @@ importers: drizzle-orm: specifier: ^0.35.2 version: 0.35.3(@electric-sql/pglite@0.2.14)(@libsql/client-wasm@0.14.0)(@types/react@18.3.18)(react@18.3.1) + esbuild-plugin-inline-worker: + specifier: ^0.1.1 + version: 0.1.1 exponential-backoff: specifier: ^3.1.1 version: 3.1.2 @@ -476,6 +479,12 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.25.1': + resolution: {integrity: sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.17.3': resolution: {integrity: sha512-XvJsYo3dO3Pi4kpalkyMvfQsjxPWHYjoX4MDiB/FUM4YMfWcXa5l4VCwFWVYI1+92yxqjuqrhNg0CZg3gSouyQ==} engines: {node: '>=12'} @@ -494,6 +503,12 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.25.1': + resolution: {integrity: sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.17.3': resolution: {integrity: 
sha512-1Mlz934GvbgdDmt26rTLmf03cAgLg5HyOgJN+ZGCeP3Q9ynYTNMn2/LQxIl7Uy+o4K6Rfi2OuLsr12JQQR8gNg==} engines: {node: '>=12'} @@ -512,6 +527,12 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-arm@0.25.1': + resolution: {integrity: sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.17.3': resolution: {integrity: sha512-nuV2CmLS07Gqh5/GrZLuqkU9Bm6H6vcCspM+zjp9TdQlxJtIe+qqEXQChmfc7nWdyr/yz3h45Utk1tUn8Cz5+A==} engines: {node: '>=12'} @@ -530,6 +551,12 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.25.1': + resolution: {integrity: sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.17.3': resolution: {integrity: sha512-01Hxaaat6m0Xp9AXGM8mjFtqqwDjzlMP0eQq9zll9U85ttVALGCGDuEvra5Feu/NbP5AEP1MaopPwzsTcUq1cw==} engines: {node: '>=12'} @@ -548,6 +575,12 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.25.1': + resolution: {integrity: sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.17.3': resolution: {integrity: sha512-Eo2gq0Q/er2muf8Z83X21UFoB7EU6/m3GNKvrhACJkjVThd0uA+8RfKpfNhuMCl1bKRfBzKOk6xaYKQZ4lZqvA==} engines: {node: '>=12'} @@ -566,6 +599,12 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.25.1': + resolution: {integrity: sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.17.3': resolution: {integrity: sha512-CN62ESxaquP61n1ZjQP/jZte8CE09M6kNn3baos2SeUfdVBkWN5n6vGp2iKyb/bm/x4JQzEvJgRHLGd5F5b81w==} engines: {node: '>=12'} @@ -584,6 +623,12 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.25.1': + resolution: {integrity: sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.17.3': resolution: {integrity: sha512-feq+K8TxIznZE+zhdVurF3WNJ/Sa35dQNYbaqM/wsCbWdzXr5lyq+AaTUSER2cUR+SXPnd/EY75EPRjf4s1SLg==} engines: {node: '>=12'} @@ -602,6 +647,12 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.25.1': + resolution: {integrity: sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.17.3': resolution: {integrity: sha512-JHeZXD4auLYBnrKn6JYJ0o5nWJI9PhChA/Nt0G4MvLaMrvXuWnY93R3a7PiXeJQphpL1nYsaMcoV2QtuvRnF/g==} engines: {node: '>=12'} @@ -620,6 +671,12 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.25.1': + resolution: {integrity: sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.17.3': resolution: {integrity: sha512-CLP3EgyNuPcg2cshbwkqYy5bbAgK+VhyfMU7oIYyn+x4Y67xb5C5ylxsNUjRmr8BX+MW3YhVNm6Lq6FKtRTWHQ==} engines: {node: '>=12'} @@ -638,6 +695,12 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.25.1': + resolution: {integrity: sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.17.3': resolution: 
{integrity: sha512-FyXlD2ZjZqTFh0sOQxFDiWG1uQUEOLbEh9gKN/7pFxck5Vw0qjWSDqbn6C10GAa1rXJpwsntHcmLqydY9ST9ZA==} engines: {node: '>=12'} @@ -656,6 +719,12 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.25.1': + resolution: {integrity: sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.17.3': resolution: {integrity: sha512-OrDGMvDBI2g7s04J8dh8/I7eSO+/E7nMDT2Z5IruBfUO/RiigF1OF6xoH33Dn4W/OwAWSUf1s2nXamb28ZklTA==} engines: {node: '>=12'} @@ -674,6 +743,12 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.25.1': + resolution: {integrity: sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.17.3': resolution: {integrity: sha512-DcnUpXnVCJvmv0TzuLwKBC2nsQHle8EIiAJiJ+PipEVC16wHXaPEKP0EqN8WnBe0TPvMITOUlP2aiL5YMld+CQ==} engines: {node: '>=12'} @@ -692,6 +767,12 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.25.1': + resolution: {integrity: sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.17.3': resolution: {integrity: sha512-BDYf/l1WVhWE+FHAW3FzZPtVlk9QsrwsxGzABmN4g8bTjmhazsId3h127pliDRRu5674k1Y2RWejbpN46N9ZhQ==} engines: {node: '>=12'} @@ -710,6 +791,12 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.25.1': + resolution: {integrity: sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.17.3': resolution: {integrity: sha512-WViAxWYMRIi+prTJTyV1wnqd2mS2cPqJlN85oscVhXdb/ZTFJdrpaqm/uDsZPGKHtbg5TuRX/ymKdOSk41YZow==} engines: {node: '>=12'} @@ -728,6 +815,12 @@ packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.25.1': + resolution: {integrity: sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.17.3': resolution: {integrity: sha512-Iw8lkNHUC4oGP1O/KhumcVy77u2s6+KUjieUqzEU3XuWJqZ+AY7uVMrrCbAiwWTkpQHkr00BuXH5RpC6Sb/7Ug==} engines: {node: '>=12'} @@ -746,6 +839,12 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.25.1': + resolution: {integrity: sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.17.3': resolution: {integrity: sha512-0AGkWQMzeoeAtXQRNB3s4J1/T2XbigM2/Mn2yU1tQSmQRmHIZdkGbVq2A3aDdNslPyhb9/lH0S5GMTZ4xsjBqg==} engines: {node: '>=12'} @@ -764,6 +863,18 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.25.1': + resolution: {integrity: sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.25.1': + resolution: {integrity: sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.17.3': resolution: {integrity: sha512-4+rR/WHOxIVh53UIQIICryjdoKdHsFZFD4zLSonJ9RRw7bhKzVyXbnRPsWSfwybYqw9sB7ots/SYyufL1mBpEg==} engines: {node: '>=12'} @@ -782,6 +893,18 @@ packages: cpu: [x64] os: [netbsd] + 
'@esbuild/netbsd-x64@0.25.1': + resolution: {integrity: sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.25.1': + resolution: {integrity: sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.17.3': resolution: {integrity: sha512-cVpWnkx9IYg99EjGxa5Gc0XmqumtAwK3aoz7O4Dii2vko+qXbkHoujWA68cqXjhh6TsLaQelfDO4MVnyr+ODeA==} engines: {node: '>=12'} @@ -800,6 +923,12 @@ packages: cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.25.1': + resolution: {integrity: sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/sunos-x64@0.17.3': resolution: {integrity: sha512-RxmhKLbTCDAY2xOfrww6ieIZkZF+KBqG7S2Ako2SljKXRFi+0863PspK74QQ7JpmWwncChY25JTJSbVBYGQk2Q==} engines: {node: '>=12'} @@ -818,6 +947,12 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.25.1': + resolution: {integrity: sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.17.3': resolution: {integrity: sha512-0r36VeEJ4efwmofxVJRXDjVRP2jTmv877zc+i+Pc7MNsIr38NfsjkQj23AfF7l0WbB+RQ7VUb+LDiqC/KY/M/A==} engines: {node: '>=12'} @@ -836,6 +971,12 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.25.1': + resolution: {integrity: sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.17.3': resolution: {integrity: sha512-wgO6rc7uGStH22nur4aLFcq7Wh86bE9cOFmfTr/yxN3BXvDEdCSXyKkO+U5JIt53eTOgC47v9k/C1bITWL/Teg==} engines: {node: '>=12'} @@ -854,6 +995,12 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.25.1': + resolution: {integrity: sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-x64@0.17.3': resolution: {integrity: sha512-FdVl64OIuiKjgXBjwZaJLKp0eaEckifbhn10dXWhysMJkWblg3OEEGKSIyhiD5RSgAya8WzP3DNkngtIg3Nt7g==} engines: {node: '>=12'} @@ -872,6 +1019,12 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.25.1': + resolution: {integrity: sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.4.1': resolution: {integrity: sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -1997,6 +2150,9 @@ packages: comma-separated-tokens@2.0.3: resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + commondir@1.0.1: + resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} + concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} @@ -2315,6 +2471,9 @@ packages: resolution: {integrity: sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==} engines: {node: '>= 0.4'} + esbuild-plugin-inline-worker@0.1.1: + resolution: {integrity: 
sha512-VmFqsQKxUlbM51C1y5bRiMeyc1x2yTdMXhKB6S//++g9aCBg8TfGsbKxl5ZDkCGquqLY+RmEk93TBNd0i35dPA==} + esbuild-register@3.6.0: resolution: {integrity: sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==} peerDependencies: @@ -2335,6 +2494,11 @@ packages: engines: {node: '>=12'} hasBin: true + esbuild@0.25.1: + resolution: {integrity: sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==} + engines: {node: '>=18'} + hasBin: true + escalade@3.2.0: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} @@ -2544,6 +2708,10 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} + find-cache-dir@3.3.2: + resolution: {integrity: sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==} + engines: {node: '>=8'} + find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} @@ -3402,6 +3570,10 @@ packages: peerDependencies: react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc + make-dir@3.1.0: + resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} + engines: {node: '>=8'} + make-dir@4.0.0: resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} engines: {node: '>=10'} @@ -4857,6 +5029,9 @@ snapshots: '@esbuild/aix-ppc64@0.19.12': optional: true + '@esbuild/aix-ppc64@0.25.1': + optional: true + '@esbuild/android-arm64@0.17.3': optional: true @@ -4866,6 +5041,9 @@ snapshots: '@esbuild/android-arm64@0.19.12': optional: true + '@esbuild/android-arm64@0.25.1': + optional: true + '@esbuild/android-arm@0.17.3': optional: true @@ -4875,6 +5053,9 @@ snapshots: '@esbuild/android-arm@0.19.12': optional: true + '@esbuild/android-arm@0.25.1': + optional: true + '@esbuild/android-x64@0.17.3': optional: true @@ -4884,6 +5065,9 @@ snapshots: '@esbuild/android-x64@0.19.12': optional: true + '@esbuild/android-x64@0.25.1': + optional: true + '@esbuild/darwin-arm64@0.17.3': optional: true @@ -4893,6 +5077,9 @@ snapshots: '@esbuild/darwin-arm64@0.19.12': optional: true + '@esbuild/darwin-arm64@0.25.1': + optional: true + '@esbuild/darwin-x64@0.17.3': optional: true @@ -4902,6 +5089,9 @@ snapshots: '@esbuild/darwin-x64@0.19.12': optional: true + '@esbuild/darwin-x64@0.25.1': + optional: true + '@esbuild/freebsd-arm64@0.17.3': optional: true @@ -4911,6 +5101,9 @@ snapshots: '@esbuild/freebsd-arm64@0.19.12': optional: true + '@esbuild/freebsd-arm64@0.25.1': + optional: true + '@esbuild/freebsd-x64@0.17.3': optional: true @@ -4920,6 +5113,9 @@ snapshots: '@esbuild/freebsd-x64@0.19.12': optional: true + '@esbuild/freebsd-x64@0.25.1': + optional: true + '@esbuild/linux-arm64@0.17.3': optional: true @@ -4929,6 +5125,9 @@ snapshots: '@esbuild/linux-arm64@0.19.12': optional: true + '@esbuild/linux-arm64@0.25.1': + optional: true + '@esbuild/linux-arm@0.17.3': optional: true @@ -4938,6 +5137,9 @@ snapshots: '@esbuild/linux-arm@0.19.12': optional: true + '@esbuild/linux-arm@0.25.1': + optional: true + '@esbuild/linux-ia32@0.17.3': optional: true @@ -4947,6 +5149,9 @@ snapshots: '@esbuild/linux-ia32@0.19.12': optional: true + '@esbuild/linux-ia32@0.25.1': + optional: true + 
'@esbuild/linux-loong64@0.17.3': optional: true @@ -4956,6 +5161,9 @@ snapshots: '@esbuild/linux-loong64@0.19.12': optional: true + '@esbuild/linux-loong64@0.25.1': + optional: true + '@esbuild/linux-mips64el@0.17.3': optional: true @@ -4965,6 +5173,9 @@ snapshots: '@esbuild/linux-mips64el@0.19.12': optional: true + '@esbuild/linux-mips64el@0.25.1': + optional: true + '@esbuild/linux-ppc64@0.17.3': optional: true @@ -4974,6 +5185,9 @@ snapshots: '@esbuild/linux-ppc64@0.19.12': optional: true + '@esbuild/linux-ppc64@0.25.1': + optional: true + '@esbuild/linux-riscv64@0.17.3': optional: true @@ -4983,6 +5197,9 @@ snapshots: '@esbuild/linux-riscv64@0.19.12': optional: true + '@esbuild/linux-riscv64@0.25.1': + optional: true + '@esbuild/linux-s390x@0.17.3': optional: true @@ -4992,6 +5209,9 @@ snapshots: '@esbuild/linux-s390x@0.19.12': optional: true + '@esbuild/linux-s390x@0.25.1': + optional: true + '@esbuild/linux-x64@0.17.3': optional: true @@ -5001,6 +5221,12 @@ snapshots: '@esbuild/linux-x64@0.19.12': optional: true + '@esbuild/linux-x64@0.25.1': + optional: true + + '@esbuild/netbsd-arm64@0.25.1': + optional: true + '@esbuild/netbsd-x64@0.17.3': optional: true @@ -5010,6 +5236,12 @@ snapshots: '@esbuild/netbsd-x64@0.19.12': optional: true + '@esbuild/netbsd-x64@0.25.1': + optional: true + + '@esbuild/openbsd-arm64@0.25.1': + optional: true + '@esbuild/openbsd-x64@0.17.3': optional: true @@ -5019,6 +5251,9 @@ snapshots: '@esbuild/openbsd-x64@0.19.12': optional: true + '@esbuild/openbsd-x64@0.25.1': + optional: true + '@esbuild/sunos-x64@0.17.3': optional: true @@ -5028,6 +5263,9 @@ snapshots: '@esbuild/sunos-x64@0.19.12': optional: true + '@esbuild/sunos-x64@0.25.1': + optional: true + '@esbuild/win32-arm64@0.17.3': optional: true @@ -5037,6 +5275,9 @@ snapshots: '@esbuild/win32-arm64@0.19.12': optional: true + '@esbuild/win32-arm64@0.25.1': + optional: true + '@esbuild/win32-ia32@0.17.3': optional: true @@ -5046,6 +5287,9 @@ snapshots: '@esbuild/win32-ia32@0.19.12': optional: true + '@esbuild/win32-ia32@0.25.1': + optional: true + '@esbuild/win32-x64@0.17.3': optional: true @@ -5055,6 +5299,9 @@ snapshots: '@esbuild/win32-x64@0.19.12': optional: true + '@esbuild/win32-x64@0.25.1': + optional: true + '@eslint-community/eslint-utils@4.4.1(eslint@8.57.1)': dependencies: eslint: 8.57.1 @@ -6491,6 +6738,8 @@ snapshots: comma-separated-tokens@2.0.3: {} + commondir@1.0.1: {} + concat-map@0.0.1: {} console-table-printer@2.12.1: @@ -6777,6 +7026,11 @@ snapshots: is-date-object: 1.1.0 is-symbol: 1.1.1 + esbuild-plugin-inline-worker@0.1.1: + dependencies: + esbuild: 0.25.1 + find-cache-dir: 3.3.2 + esbuild-register@3.6.0(esbuild@0.19.12): dependencies: debug: 4.4.0 @@ -6860,6 +7114,34 @@ snapshots: '@esbuild/win32-ia32': 0.19.12 '@esbuild/win32-x64': 0.19.12 + esbuild@0.25.1: + optionalDependencies: + '@esbuild/aix-ppc64': 0.25.1 + '@esbuild/android-arm': 0.25.1 + '@esbuild/android-arm64': 0.25.1 + '@esbuild/android-x64': 0.25.1 + '@esbuild/darwin-arm64': 0.25.1 + '@esbuild/darwin-x64': 0.25.1 + '@esbuild/freebsd-arm64': 0.25.1 + '@esbuild/freebsd-x64': 0.25.1 + '@esbuild/linux-arm': 0.25.1 + '@esbuild/linux-arm64': 0.25.1 + '@esbuild/linux-ia32': 0.25.1 + '@esbuild/linux-loong64': 0.25.1 + '@esbuild/linux-mips64el': 0.25.1 + '@esbuild/linux-ppc64': 0.25.1 + '@esbuild/linux-riscv64': 0.25.1 + '@esbuild/linux-s390x': 0.25.1 + '@esbuild/linux-x64': 0.25.1 + '@esbuild/netbsd-arm64': 0.25.1 + '@esbuild/netbsd-x64': 0.25.1 + '@esbuild/openbsd-arm64': 0.25.1 + '@esbuild/openbsd-x64': 0.25.1 + 
'@esbuild/sunos-x64': 0.25.1 + '@esbuild/win32-arm64': 0.25.1 + '@esbuild/win32-ia32': 0.25.1 + '@esbuild/win32-x64': 0.25.1 + escalade@3.2.0: {} escape-string-regexp@2.0.0: {} @@ -7128,6 +7410,12 @@ snapshots: dependencies: to-regex-range: 5.0.1 + find-cache-dir@3.3.2: + dependencies: + commondir: 1.0.1 + make-dir: 3.1.0 + pkg-dir: 4.2.0 + find-up@4.1.0: dependencies: locate-path: 5.0.0 @@ -8214,6 +8502,10 @@ snapshots: dependencies: react: 18.3.1 + make-dir@3.1.0: + dependencies: + semver: 6.3.1 + make-dir@4.0.0: dependencies: semver: 7.7.1 diff --git a/src/components/chat-view/Chat.tsx b/src/components/chat-view/Chat.tsx index f019a2d..d4e5233 100644 --- a/src/components/chat-view/Chat.tsx +++ b/src/components/chat-view/Chat.tsx @@ -14,7 +14,6 @@ import { } from 'react' import { v4 as uuidv4 } from 'uuid' -import { ModeSelect } from './chat-input/ModeSelect' import { ApplyViewState } from '../../ApplyView' import { APPLY_VIEW_TYPE } from '../../constants' import { useApp } from '../../contexts/AppContext' @@ -53,6 +52,7 @@ const readFileContent = (filePath: string): string => { return `Content of file: ${filePath}`; } +import { ModeSelect } from './chat-input/ModeSelect' import PromptInputWithActions, { ChatUserInputRef } from './chat-input/PromptInputWithActions' import { editorStateToPlainText } from './chat-input/utils/editor-state-to-plain-text' import { ChatHistory } from './ChatHistory' diff --git a/src/components/chat-view/MarkdownWithIcon.tsx b/src/components/chat-view/MarkdownWithIcon.tsx index 2189730..669982a 100644 --- a/src/components/chat-view/MarkdownWithIcon.tsx +++ b/src/components/chat-view/MarkdownWithIcon.tsx @@ -1,16 +1,15 @@ import * as Tooltip from '@radix-ui/react-tooltip'; import { Check, CircleCheckBig, CircleHelp, CopyIcon, FilePlus2 } from 'lucide-react'; -import { ComponentPropsWithoutRef, useState } from 'react'; +import { ReactNode, useState } from 'react'; import ReactMarkdown from 'react-markdown'; import rehypeRaw from 'rehype-raw'; import { useApp } from 'src/contexts/AppContext'; - function CopyButton({ message }: { message: string }) { const [copied, setCopied] = useState(false) const handleCopy = async () => { - await navigator.clipboard.writeText(message.trim()) + await navigator.clipboard.writeText(message) setCopied(true) setTimeout(() => { setCopied(false) @@ -49,8 +48,6 @@ function CreateNewFileButton({ message }: { message: string }) { const handleCreate = async () => { const firstLine = message.split('\n')[0].trim().replace(/[\\\/:]/g, ''); const filename = firstLine.slice(0, 200) + (firstLine.length > 200 ? '...' 
: '') || 'untitled'; - console.log('filename', filename) - console.log('message', message) await app.vault.create(`/${filename}.md`, message) setCreated(true) setTimeout(() => { @@ -68,7 +65,7 @@ function CreateNewFileButton({ message }: { message: string }) { className="infio-chat-message-actions-icon--copied" /> ) : ( - + )} @@ -82,58 +79,70 @@ function CreateNewFileButton({ message }: { message: string }) { ) } -const MarkdownWithIcons = ({ markdownContent, className }: { markdownContent: string, className?: string }) => { - // 预处理markdown内容,将标签转换为ReactMarkdown可以处理的格式 - const processedContent = markdownContent.replace( - /]*\/>/g, - (_, name, size, __, className) => - `` - ); +type IconType = 'ask_followup_question' | 'attempt_completion'; - const rawContent = markdownContent.replace( - /]*\/>/g, - () => `` - ).trim(); +interface MarkdownWithIconsProps { + markdownContent: string; + finish: boolean + className?: string; + iconName?: IconType; + iconSize?: number; + iconClassName?: string; +} - const components = { - span: (props: ComponentPropsWithoutRef<'span'> & { - 'data-icon'?: string; - 'data-size'?: string; - }) => { - if (props['data-icon']) { - const name = props['data-icon']; - const size = props['data-size'] ? Number(props['data-size']) : 16; - const className = props.className || ''; +const MarkdownWithIcons = ({ + markdownContent, + finish, + className, + iconName, + iconSize = 14, + iconClassName = "infio-markdown-icon" +}: MarkdownWithIconsProps) => { + // Handle icon rendering directly without string manipulation + const renderIcon = (): ReactNode => { + if (!iconName) return null; - switch (name) { - case 'ask_followup_question': - return ; - case 'attempt_completion': - return ; - default: - return null; - } - } - return ; - }, + switch (iconName) { + case 'ask_followup_question': + return ; + case 'attempt_completion': + return ; + default: + return null; + } }; + const renderTitle = (): ReactNode => { + if (!iconName) return null; + + switch (iconName) { + case 'ask_followup_question': + return 'Ask Followup Question:'; + case 'attempt_completion': + return 'Task Completion'; + default: + return null; + } + }; + + // Component for markdown content return ( <> - - {processedContent} - - {processedContent && +
+ {iconName && renderIcon()} {renderTitle()} + + {markdownContent} + +
+ {markdownContent && finish &&
-          <CopyButton message={rawContent} />
-          <CreateNewFileButton message={rawContent} />
+          <CopyButton message={markdownContent} />
+          <CreateNewFileButton message={markdownContent} />
} - ); }; diff --git a/src/components/chat-view/ReactMarkdown.tsx b/src/components/chat-view/ReactMarkdown.tsx index 3db924a..6a95486 100644 --- a/src/components/chat-view/ReactMarkdown.tsx +++ b/src/components/chat-view/ReactMarkdown.tsx @@ -123,16 +123,22 @@ function ReactMarkdown({ - ${block.result && block.result.trimStart()}`} /> + markdownContent={block.result} + finish={block.finish} + iconName="attempt_completion" + iconSize={14} + iconClassName="infio-markdown-icon" + /> ) : block.type === 'ask_followup_question' ? ( - ${block.question && block.question.trimStart()}`} /> + markdownContent={block.question} + finish={block.finish} + iconName="ask_followup_question" + iconSize={14} + iconClassName="infio-markdown-icon" + /> ) : block.type === 'switch_mode' ? ( Your search query here -Examples: +Examples1: capital of France population statistics 2023 +Examples2: "renewable energy" growth statistics Europe +Examples3: react vs angular vs vue.js comparison ` diff --git a/src/core/rag/rag-engine.ts b/src/core/rag/rag-engine.ts index 34e12a8..1e0d48a 100644 --- a/src/core/rag/rag-engine.ts +++ b/src/core/rag/rag-engine.ts @@ -1,4 +1,4 @@ -import { App } from 'obsidian' +import { App, TFile } from 'obsidian' import { QueryProgressState } from '../../components/chat-view/QueryProgress' import { DBManager } from '../../database/database-manager' @@ -13,7 +13,8 @@ export class RAGEngine { private app: App private settings: InfioSettings private vectorManager: VectorManager - private embeddingModel: EmbeddingModel | null = null + private embeddingModel: EmbeddingModel | null = null + private initialized = false constructor( app: App, @@ -23,7 +24,7 @@ export class RAGEngine { this.app = app this.settings = settings this.vectorManager = dbManager.getVectorManager() - this.embeddingModel = getEmbeddingModel(settings) + this.embeddingModel = getEmbeddingModel(settings) } setSettings(settings: InfioSettings) { @@ -34,16 +35,14 @@ export class RAGEngine { // TODO: Implement automatic vault re-indexing when settings are changed. // Currently, users must manually re-index the vault. async updateVaultIndex( - options: { reindexAll: boolean } = { - reindexAll: false, - }, + options: { reindexAll: boolean }, onQueryProgressChange?: (queryProgress: QueryProgressState) => void, - ): Promise { - if (!this.embeddingModel) { - throw new Error('Embedding model is not set') + ): Promise { + if (!this.embeddingModel) { + throw new Error('Embedding model is not set') } await this.vectorManager.updateVaultIndex( - this.embeddingModel, + this.embeddingModel, { chunkSize: this.settings.ragOptions.chunkSize, excludePatterns: this.settings.ragOptions.excludePatterns, @@ -57,7 +56,23 @@ export class RAGEngine { }) }, ) - } + this.initialized = true + } + + async updateFileIndex(file: TFile) { + await this.vectorManager.UpdateFileVectorIndex( + this.embeddingModel, + this.settings.ragOptions.chunkSize, + file, + ) + } + + async deleteFileIndex(file: TFile) { + await this.vectorManager.DeleteFileVectorIndex( + this.embeddingModel, + file, + ) + } async processQuery({ query, @@ -78,13 +93,19 @@ export class RAGEngine { if (!this.embeddingModel) { throw new Error('Embedding model is not set') } - // TODO: Decide the vault index update strategy. - // Current approach: Update on every query. 
- await this.updateVaultIndex({ reindexAll: false }, onQueryProgressChange) + + if (!this.initialized) { + await this.updateVaultIndex({ reindexAll: false }, onQueryProgressChange) + } const queryEmbedding = await this.getQueryEmbedding(query) onQueryProgressChange?.({ type: 'querying', - }) + }) + console.log('query, ', { + minSimilarity: this.settings.ragOptions.minSimilarity, + limit: this.settings.ragOptions.limit, + scope, + }) const queryResult = await this.vectorManager.performSimilaritySearch( queryEmbedding, this.embeddingModel, @@ -93,7 +114,8 @@ export class RAGEngine { limit: this.settings.ragOptions.limit, scope, }, - ) + ) + console.log('queryResult', queryResult) onQueryProgressChange?.({ type: 'querying-done', queryResult, diff --git a/src/database/database-manager.ts b/src/database/database-manager.ts index 2bb8a6d..454cc03 100644 --- a/src/database/database-manager.ts +++ b/src/database/database-manager.ts @@ -1,38 +1,33 @@ -import { PGlite } from '@electric-sql/pglite' // @ts-expect-error -import { type PGliteWithLive, live } from '@electric-sql/pglite/live' -import { App, normalizePath } from 'obsidian' +import { type PGliteWithLive } from '@electric-sql/pglite/live' +import { App } from 'obsidian' -import { PGLITE_DB_PATH } from '../constants' +// import { PGLITE_DB_PATH } from '../constants' +import { createAndInitDb } from '../pgworker' import { ConversationManager } from './modules/conversation/conversation-manager' import { TemplateManager } from './modules/template/template-manager' import { VectorManager } from './modules/vector/vector-manager' -import { pgliteResources } from './pglite-resources' -import { migrations } from './sql' +// import { pgliteResources } from './pglite-resources' +// import { migrations } from './sql' export class DBManager { - private app: App - private dbPath: string + // private app: App + // private dbPath: string private db: PGliteWithLive | null = null // private db: PgliteDatabase | null = null private vectorManager: VectorManager private templateManager: TemplateManager private conversationManager: ConversationManager - constructor(app: App, dbPath: string) { + constructor(app: App) { this.app = app - this.dbPath = dbPath + // this.dbPath = dbPath } static async create(app: App): Promise { - const dbManager = new DBManager(app, normalizePath(PGLITE_DB_PATH)) - await dbManager.loadExistingDatabase() - if (!dbManager.db) { - await dbManager.createNewDatabase() - } - await dbManager.migrateDatabase() - await dbManager.save() + const dbManager = new DBManager(app) + dbManager.db = await createAndInitDb() dbManager.vectorManager = new VectorManager(app, dbManager) dbManager.templateManager = new TemplateManager(app, dbManager) @@ -57,81 +52,70 @@ export class DBManager { return this.conversationManager } - private async createNewDatabase() { - const { fsBundle, wasmModule, vectorExtensionBundlePath } = - await this.loadPGliteResources() - this.db = await PGlite.create({ - fsBundle: fsBundle, - wasmModule: wasmModule, - extensions: { - vector: vectorExtensionBundlePath, - live, - }, - }) - } + // private async createNewDatabase() { + // const { fsBundle, wasmModule, vectorExtensionBundlePath } = + // await this.loadPGliteResources() + // this.db = await PGlite.create({ + // fsBundle: fsBundle, + // wasmModule: wasmModule, + // extensions: { + // vector: vectorExtensionBundlePath, + // live, + // }, + // }) + // } - private async loadExistingDatabase() { - try { - const databaseFileExists = await this.app.vault.adapter.exists( - 
this.dbPath, - ) - if (!databaseFileExists) { - return null - } - const fileBuffer = await this.app.vault.adapter.readBinary(this.dbPath) - const fileBlob = new Blob([fileBuffer], { type: 'application/x-gzip' }) - const { fsBundle, wasmModule, vectorExtensionBundlePath } = - await this.loadPGliteResources() - this.db = await PGlite.create({ - loadDataDir: fileBlob, - fsBundle: fsBundle, - wasmModule: wasmModule, - extensions: { - vector: vectorExtensionBundlePath, - live - }, - }) - // return drizzle(this.pgClient) - } catch (error) { - console.error('Error loading database:', error) - console.log(this.dbPath) - return null - } - } + // private async loadExistingDatabase() { + // try { + // const databaseFileExists = await this.app.vault.adapter.exists( + // this.dbPath, + // ) + // if (!databaseFileExists) { + // return null + // } + // const fileBuffer = await this.app.vault.adapter.readBinary(this.dbPath) + // const fileBlob = new Blob([fileBuffer], { type: 'application/x-gzip' }) + // const { fsBundle, wasmModule, vectorExtensionBundlePath } = + // await this.loadPGliteResources() + // this.db = await PGlite.create({ + // loadDataDir: fileBlob, + // fsBundle: fsBundle, + // wasmModule: wasmModule, + // extensions: { + // vector: vectorExtensionBundlePath, + // live + // }, + // }) + // // return drizzle(this.pgClient) + // } catch (error) { + // console.error('Error loading database:', error) + // console.log(this.dbPath) + // return null + // } + // } - private async migrateDatabase(): Promise { - if (!this.db) { - throw new Error('Database client not initialized'); - } + // private async migrateDatabase(): Promise { + // if (!this.db) { + // throw new Error('Database client not initialized'); + // } - try { - // Execute SQL migrations - for (const [_key, migration] of Object.entries(migrations)) { - // Split SQL into individual commands and execute them one by one - const commands = migration.sql.split('\n\n').filter(cmd => cmd.trim()); - for (const command of commands) { - await this.db.query(command); - } - } - } catch (error) { - console.error('Error executing SQL migrations:', error); - throw error; - } - } + // try { + // // Execute SQL migrations + // for (const [_key, migration] of Object.entries(migrations)) { + // // Split SQL into individual commands and execute them one by one + // const commands = migration.sql.split('\n\n').filter(cmd => cmd.trim()); + // for (const command of commands) { + // await this.db.query(command); + // } + // } + // } catch (error) { + // console.error('Error executing SQL migrations:', error); + // throw error; + // } + // } async save(): Promise { - if (!this.db) { - return - } - try { - const blob: Blob = await this.db.dumpDataDir('gzip') - await this.app.vault.adapter.writeBinary( - this.dbPath, - Buffer.from(await blob.arrayBuffer()), - ) - } catch (error) { - console.error('Error saving database:', error) - } + console.log("need remove") } async cleanup() { @@ -139,37 +123,37 @@ export class DBManager { this.db = null } - private async loadPGliteResources(): Promise<{ - fsBundle: Blob - wasmModule: WebAssembly.Module - vectorExtensionBundlePath: URL - }> { - try { - // Convert base64 to binary data - const wasmBinary = Buffer.from(pgliteResources.wasmBase64, 'base64') - const dataBinary = Buffer.from(pgliteResources.dataBase64, 'base64') - const vectorBinary = Buffer.from(pgliteResources.vectorBase64, 'base64') + // private async loadPGliteResources(): Promise<{ + // fsBundle: Blob + // wasmModule: WebAssembly.Module + // 
vectorExtensionBundlePath: URL + // }> { + // try { + // // Convert base64 to binary data + // const wasmBinary = Buffer.from(pgliteResources.wasmBase64, 'base64') + // const dataBinary = Buffer.from(pgliteResources.dataBase64, 'base64') + // const vectorBinary = Buffer.from(pgliteResources.vectorBase64, 'base64') - // Create blobs from binary data - const fsBundle = new Blob([dataBinary], { - type: 'application/octet-stream', - }) - const wasmModule = await WebAssembly.compile(wasmBinary) + // // Create blobs from binary data + // const fsBundle = new Blob([dataBinary], { + // type: 'application/octet-stream', + // }) + // const wasmModule = await WebAssembly.compile(wasmBinary) - // Create a blob URL for the vector extension - const vectorBlob = new Blob([vectorBinary], { - type: 'application/gzip', - }) - const vectorExtensionBundlePath = URL.createObjectURL(vectorBlob) + // // Create a blob URL for the vector extension + // const vectorBlob = new Blob([vectorBinary], { + // type: 'application/gzip', + // }) + // const vectorExtensionBundlePath = URL.createObjectURL(vectorBlob) - return { - fsBundle, - wasmModule, - vectorExtensionBundlePath: new URL(vectorExtensionBundlePath), - } - } catch (error) { - console.error('Error loading PGlite resources:', error) - throw error - } - } + // return { + // fsBundle, + // wasmModule, + // vectorExtensionBundlePath: new URL(vectorExtensionBundlePath), + // } + // } catch (error) { + // console.error('Error loading PGlite resources:', error) + // throw error + // } + // } } diff --git a/src/database/modules/conversation/conversation-manager.ts b/src/database/modules/conversation/conversation-manager.ts index cdb8930..2009086 100644 --- a/src/database/modules/conversation/conversation-manager.ts +++ b/src/database/modules/conversation/conversation-manager.ts @@ -30,7 +30,6 @@ export class ConversationManager { updatedAt: new Date(), } await this.repository.create(conversation) - await this.dbManager.save() } async saveConversation(id: string, messages: ChatMessage[]): Promise { @@ -59,7 +58,6 @@ export class ConversationManager { // Update conversation timestamp await this.repository.update(id, { updatedAt: new Date() }) - await this.dbManager.save() } async findConversation(id: string): Promise { @@ -74,7 +72,6 @@ export class ConversationManager { async deleteConversation(id: string): Promise { await this.repository.delete(id) - await this.dbManager.save() } getAllConversations(callback: (conversations: ChatConversationMeta[]) => void): void { @@ -92,7 +89,6 @@ export class ConversationManager { async updateConversationTitle(id: string, title: string): Promise { await this.repository.update(id, { title }) - await this.dbManager.save() } // convert ChatMessage to InsertMessage diff --git a/src/database/modules/template/template-manager.ts b/src/database/modules/template/template-manager.ts index 1d78ee5..a4be3b7 100644 --- a/src/database/modules/template/template-manager.ts +++ b/src/database/modules/template/template-manager.ts @@ -24,7 +24,6 @@ export class TemplateManager { throw new DuplicateTemplateException(template.name) } const created = await this.repository.create(template) - await this.dbManager.save() return created } @@ -45,7 +44,6 @@ export class TemplateManager { async deleteTemplate(id: string): Promise { const deleted = await this.repository.delete(id) - await this.dbManager.save() return deleted } } diff --git a/src/database/modules/vector/vector-manager.ts b/src/database/modules/vector/vector-manager.ts index 17f12c6..61ab56e 
100644 --- a/src/database/modules/vector/vector-manager.ts +++ b/src/database/modules/vector/vector-manager.ts @@ -6,10 +6,10 @@ import pLimit from 'p-limit' import { IndexProgress } from '../../../components/chat-view/QueryProgress' import { - LLMAPIKeyInvalidException, - LLMAPIKeyNotSetException, - LLMBaseUrlNotSetException, - LLMRateLimitExceededException, + LLMAPIKeyInvalidException, + LLMAPIKeyNotSetException, + LLMBaseUrlNotSetException, + LLMRateLimitExceededException, } from '../../../core/llm/exception' import { InsertVector, SelectVector } from '../../../database/schema' import { EmbeddingModel } from '../../../types/embedding' @@ -19,260 +19,353 @@ import { DBManager } from '../../database-manager' import { VectorRepository } from './vector-repository' export class VectorManager { - private app: App - private repository: VectorRepository - private dbManager: DBManager + private app: App + private repository: VectorRepository + private dbManager: DBManager - constructor(app: App, dbManager: DBManager) { - this.app = app - this.dbManager = dbManager - this.repository = new VectorRepository(app, dbManager.getPgClient()) - } + constructor(app: App, dbManager: DBManager) { + this.app = app + this.dbManager = dbManager + this.repository = new VectorRepository(app, dbManager.getPgClient()) + } - async performSimilaritySearch( - queryVector: number[], - embeddingModel: EmbeddingModel, - options: { - minSimilarity: number - limit: number - scope?: { - files: string[] - folders: string[] - } - }, - ): Promise< - (Omit & { - similarity: number - })[] - > { - return await this.repository.performSimilaritySearch( - queryVector, - embeddingModel, - options, - ) - } + async performSimilaritySearch( + queryVector: number[], + embeddingModel: EmbeddingModel, + options: { + minSimilarity: number + limit: number + scope?: { + files: string[] + folders: string[] + } + }, + ): Promise< + (Omit & { + similarity: number + })[] + > { + return await this.repository.performSimilaritySearch( + queryVector, + embeddingModel, + options, + ) + } - async updateVaultIndex( - embeddingModel: EmbeddingModel, - options: { - chunkSize: number - excludePatterns: string[] - includePatterns: string[] - reindexAll?: boolean - }, - updateProgress?: (indexProgress: IndexProgress) => void, - ): Promise { - let filesToIndex: TFile[] - if (options.reindexAll) { - filesToIndex = await this.getFilesToIndex({ - embeddingModel: embeddingModel, - excludePatterns: options.excludePatterns, - includePatterns: options.includePatterns, - reindexAll: true, - }) - await this.repository.clearAllVectors(embeddingModel) - } else { - await this.deleteVectorsForDeletedFiles(embeddingModel) - filesToIndex = await this.getFilesToIndex({ - embeddingModel: embeddingModel, - excludePatterns: options.excludePatterns, - includePatterns: options.includePatterns, - }) - await this.repository.deleteVectorsForMultipleFiles( - filesToIndex.map((file) => file.path), - embeddingModel, - ) - } + async updateVaultIndex( + embeddingModel: EmbeddingModel, + options: { + chunkSize: number + excludePatterns: string[] + includePatterns: string[] + reindexAll?: boolean + }, + updateProgress?: (indexProgress: IndexProgress) => void, + ): Promise { + let filesToIndex: TFile[] + if (options.reindexAll) { + filesToIndex = await this.getFilesToIndex({ + embeddingModel: embeddingModel, + excludePatterns: options.excludePatterns, + includePatterns: options.includePatterns, + reindexAll: true, + }) + await this.repository.clearAllVectors(embeddingModel) + } else { + 
await this.cleanVectorsForDeletedFiles(embeddingModel) + filesToIndex = await this.getFilesToIndex({ + embeddingModel: embeddingModel, + excludePatterns: options.excludePatterns, + includePatterns: options.includePatterns, + }) + await this.repository.deleteVectorsForMultipleFiles( + filesToIndex.map((file) => file.path), + embeddingModel, + ) + } - if (filesToIndex.length === 0) { - return - } + if (filesToIndex.length === 0) { + return + } - const textSplitter = RecursiveCharacterTextSplitter.fromLanguage( - 'markdown', - { - chunkSize: options.chunkSize, - // TODO: Use token-based chunking after migrating to WebAssembly-based tiktoken - // Current token counting method is too slow for practical use - // lengthFunction: async (text) => { - // return await tokenCount(text) - // }, - }, - ) + const textSplitter = RecursiveCharacterTextSplitter.fromLanguage( + 'markdown', + { + chunkSize: options.chunkSize, + // TODO: Use token-based chunking after migrating to WebAssembly-based tiktoken + // Current token counting method is too slow for practical use + // lengthFunction: async (text) => { + // return await tokenCount(text) + // }, + }, + ) - const contentChunks: InsertVector[] = ( - await Promise.all( - filesToIndex.map(async (file) => { - const fileContent = await this.app.vault.cachedRead(file) - const fileDocuments = await textSplitter.createDocuments([ - fileContent, - ]) - return fileDocuments.map((chunk): InsertVector => { - return { - path: file.path, - mtime: file.stat.mtime, + const contentChunks: InsertVector[] = ( + await Promise.all( + filesToIndex.map(async (file) => { + const fileContent = await this.app.vault.cachedRead(file) + const fileDocuments = await textSplitter.createDocuments([ + fileContent, + ]) + return fileDocuments.map((chunk): InsertVector => { + return { + path: file.path, + mtime: file.stat.mtime, content: chunk.pageContent, embedding: [], - metadata: { - startLine: chunk.metadata.loc.lines.from as number, - endLine: chunk.metadata.loc.lines.to as number, - }, - } - }) - }), - ) - ).flat() + metadata: { + startLine: Number(chunk.metadata.loc.lines.from), + endLine: Number(chunk.metadata.loc.lines.to), + }, + } + }) + }), + ) + ).flat() - updateProgress?.({ - completedChunks: 0, - totalChunks: contentChunks.length, - totalFiles: filesToIndex.length, - }) + updateProgress?.({ + completedChunks: 0, + totalChunks: contentChunks.length, + totalFiles: filesToIndex.length, + }) - const embeddingProgress = { completed: 0, inserted: 0 } - const embeddingChunks: InsertVector[] = [] - const batchSize = 100 - const limit = pLimit(50) - const abortController = new AbortController() - const tasks = contentChunks.map((chunk) => - limit(async () => { - if (abortController.signal.aborted) { - throw new Error('Operation was aborted') - } - try { - await backOff( - async () => { - const embedding = await embeddingModel.getEmbedding(chunk.content) - const embeddedChunk = { - path: chunk.path, - mtime: chunk.mtime, - content: chunk.content, - embedding, - metadata: chunk.metadata, - } - embeddingChunks.push(embeddedChunk) - embeddingProgress.completed++ - updateProgress?.({ - completedChunks: embeddingProgress.completed, - totalChunks: contentChunks.length, - totalFiles: filesToIndex.length, - }) + const embeddingProgress = { completed: 0 } + const embeddingChunks: InsertVector[] = [] + const batchSize = 100 + const limit = pLimit(50) + const abortController = new AbortController() + const tasks = contentChunks.map((chunk) => + limit(async () => { + if 
(abortController.signal.aborted) { + throw new Error('Operation was aborted') + } + try { + await backOff( + async () => { + const embedding = await embeddingModel.getEmbedding(chunk.content) + const embeddedChunk = { + path: chunk.path, + mtime: chunk.mtime, + content: chunk.content, + embedding, + metadata: chunk.metadata, + } + embeddingChunks.push(embeddedChunk) + embeddingProgress.completed++ + updateProgress?.({ + completedChunks: embeddingProgress.completed, + totalChunks: contentChunks.length, + totalFiles: filesToIndex.length, + }) + }, + { + numOfAttempts: 5, + startingDelay: 1000, + timeMultiple: 1.5, + jitter: 'full', + }, + ) + } catch (error) { + abortController.abort() + throw error + } + }), + ) - // Insert vectors in batches - if ( - embeddingChunks.length >= - embeddingProgress.inserted + batchSize || - embeddingChunks.length === contentChunks.length - ) { - await this.repository.insertVectors( - embeddingChunks.slice( - embeddingProgress.inserted, - embeddingProgress.inserted + batchSize, - ), - embeddingModel, - ) - embeddingProgress.inserted += batchSize - } - }, - { - numOfAttempts: 5, - startingDelay: 1000, - timeMultiple: 1.5, - jitter: 'full', - }, - ) - } catch (error) { - abortController.abort() - throw error - } - }), - ) + try { + await Promise.all(tasks) - try { - await Promise.all(tasks) - } catch (error) { - if ( - error instanceof LLMAPIKeyNotSetException || - error instanceof LLMAPIKeyInvalidException || - error instanceof LLMBaseUrlNotSetException - ) { - openSettingsModalWithError(this.app, (error as Error).message) - } else if (error instanceof LLMRateLimitExceededException) { - new Notice(error.message) - } else { - console.error('Error embedding chunks:', error) - throw error - } - } finally { - await this.dbManager.save() - } - } + // all embedding generated, batch insert + if (embeddingChunks.length > 0) { + // batch insert all vectors + let inserted = 0 + while (inserted < embeddingChunks.length) { + const chunksToInsert = embeddingChunks.slice( + inserted, + Math.min(inserted + batchSize, embeddingChunks.length) + ) + await this.repository.insertVectors(chunksToInsert, embeddingModel) + inserted += chunksToInsert.length + } + } + } catch (error) { + if ( + error instanceof LLMAPIKeyNotSetException || + error instanceof LLMAPIKeyInvalidException || + error instanceof LLMBaseUrlNotSetException + ) { + openSettingsModalWithError(this.app, error.message) + } else if (error instanceof LLMRateLimitExceededException) { + new Notice(error.message) + } else { + console.error('Error embedding chunks:', error) + throw error + } + } + } - private async deleteVectorsForDeletedFiles(embeddingModel: EmbeddingModel) { - const indexedFilePaths = - await this.repository.getIndexedFilePaths(embeddingModel) - for (const filePath of indexedFilePaths) { - if (!this.app.vault.getAbstractFileByPath(filePath)) { - await this.repository.deleteVectorsForMultipleFiles( - [filePath], - embeddingModel, - ) - } - } - } + async UpdateFileVectorIndex( + embeddingModel: EmbeddingModel, + chunkSize: number, + file: TFile + ) { - private async getFilesToIndex({ - embeddingModel, - excludePatterns, - includePatterns, - reindexAll, - }: { - embeddingModel: EmbeddingModel - excludePatterns: string[] - includePatterns: string[] - reindexAll?: boolean - }): Promise { - let filesToIndex = this.app.vault.getMarkdownFiles() + // Delete existing vectors for the files + await this.repository.deleteVectorsForSingleFile( + file.path, + embeddingModel, + ) - filesToIndex = 
filesToIndex.filter((file) => { - return !excludePatterns.some((pattern) => minimatch(file.path, pattern)) - }) + // Embed the files + const textSplitter = RecursiveCharacterTextSplitter.fromLanguage( + 'markdown', + { + chunkSize, + }, + ) + const fileContent = await this.app.vault.cachedRead(file) + const fileDocuments = await textSplitter.createDocuments([ + fileContent, + ]) - if (includePatterns.length > 0) { - filesToIndex = filesToIndex.filter((file) => { - return includePatterns.some((pattern) => minimatch(file.path, pattern)) - }) - } + const contentChunks: InsertVector[] = fileDocuments.map((chunk): InsertVector => { + return { + path: file.path, + mtime: file.stat.mtime, + content: chunk.pageContent, + embedding: [], + metadata: { + startLine: Number(chunk.metadata.loc.lines.from), + endLine: Number(chunk.metadata.loc.lines.to), + }, + } + }) - if (reindexAll) { - return filesToIndex - } + const embeddingChunks: InsertVector[] = [] + const limit = pLimit(50) + const abortController = new AbortController() + const tasks = contentChunks.map((chunk) => + limit(async () => { + if (abortController.signal.aborted) { + throw new Error('Operation was aborted') + } + try { + await backOff( + async () => { + const embedding = await embeddingModel.getEmbedding(chunk.content) + const embeddedChunk = { + path: chunk.path, + mtime: chunk.mtime, + content: chunk.content, + embedding, + metadata: chunk.metadata, + } + embeddingChunks.push(embeddedChunk) + }, + { + numOfAttempts: 5, + startingDelay: 1000, + timeMultiple: 1.5, + jitter: 'full', + }, + ) + } catch (error) { + abortController.abort() + throw error + } + }), + ) - // Check for updated or new files - filesToIndex = await Promise.all( - filesToIndex.map(async (file) => { - const fileChunks = await this.repository.getVectorsByFilePath( - file.path, - embeddingModel, - ) - if (fileChunks.length === 0) { - // File is not indexed, so we need to index it - const fileContent = await this.app.vault.cachedRead(file) - if (fileContent.length === 0) { - // Ignore empty files - return null - } - return file - } - const outOfDate = file.stat.mtime > fileChunks[0].mtime - if (outOfDate) { - // File has changed, so we need to re-index it - return file - } - return null - }), - ).then((files) => files.filter(Boolean)) + try { + await Promise.all(tasks) - return filesToIndex - } + // all embedding generated, batch insert + if (embeddingChunks.length > 0) { + const batchSize = 100 + let inserted = 0 + while (inserted < embeddingChunks.length) { + const chunksToInsert = embeddingChunks.slice(inserted, Math.min(inserted + batchSize, embeddingChunks.length)) + await this.repository.insertVectors(chunksToInsert, embeddingModel) + inserted += chunksToInsert.length + } + } + } catch (error) { + console.error('Error embedding chunks:', error) + } + } + + async DeleteFileVectorIndex( + embeddingModel: EmbeddingModel, + file: TFile + ) { + await this.repository.deleteVectorsForSingleFile(file.path, embeddingModel) + } + + private async cleanVectorsForDeletedFiles( + embeddingModel: EmbeddingModel, + ) { + const indexedFilePaths = await this.repository.getAllIndexedFilePaths(embeddingModel) + const needToDelete = indexedFilePaths.filter(filePath => !this.app.vault.getAbstractFileByPath(filePath)) + if (needToDelete.length > 0) { + await this.repository.deleteVectorsForMultipleFiles( + needToDelete, + embeddingModel, + ) + } + } + + private async getFilesToIndex({ + embeddingModel, + excludePatterns, + includePatterns, + reindexAll, + }: { + embeddingModel: 
EmbeddingModel + excludePatterns: string[] + includePatterns: string[] + reindexAll?: boolean + }): Promise { + let filesToIndex = this.app.vault.getMarkdownFiles() + + filesToIndex = filesToIndex.filter((file) => { + return !excludePatterns.some((pattern) => minimatch(file.path, pattern)) + }) + + if (includePatterns.length > 0) { + filesToIndex = filesToIndex.filter((file) => { + return includePatterns.some((pattern) => minimatch(file.path, pattern)) + }) + } + + if (reindexAll) { + return filesToIndex + } + + // Check for updated or new files + filesToIndex = await Promise.all( + filesToIndex.map(async (file) => { + const fileChunks = await this.repository.getVectorsByFilePath( + file.path, + embeddingModel, + ) + if (fileChunks.length === 0) { + // File is not indexed, so we need to index it + const fileContent = await this.app.vault.cachedRead(file) + if (fileContent.length === 0) { + // Ignore empty files + return null + } + return file + } + const outOfDate = file.stat.mtime > fileChunks[0].mtime + if (outOfDate) { + // File has changed, so we need to re-index it + return file + } + return null + }), + ).then((files) => files.filter(Boolean)) + + return filesToIndex + } } diff --git a/src/database/modules/vector/vector-repository.ts b/src/database/modules/vector/vector-repository.ts index 6a93679..301562f 100644 --- a/src/database/modules/vector/vector-repository.ts +++ b/src/database/modules/vector/vector-repository.ts @@ -22,7 +22,7 @@ export class VectorRepository { return tableDefinition.name } - async getIndexedFilePaths(embeddingModel: EmbeddingModel): Promise { + async getAllIndexedFilePaths(embeddingModel: EmbeddingModel): Promise { if (!this.db) { throw new DatabaseNotInitializedException() } @@ -80,7 +80,7 @@ export class VectorRepository { if (!this.db) { throw new DatabaseNotInitializedException() } - const tableName = this.getTableName(embeddingModel) + const tableName = this.getTableName(embeddingModel) await this.db.query(`DELETE FROM "${tableName}"`) } @@ -160,7 +160,11 @@ export class VectorRepository { if (conditions.length > 0) { scopeCondition = `AND (${conditions.join(' OR ')})` } - } + } + + const queryVectorLength = `SELECT count(1) FROM "${tableName}"`; + const queryVectorLengthResult = await this.db.query(queryVectorLength) + console.log('queryVectorLengthResult, ', queryVectorLengthResult) const query = ` SELECT diff --git a/src/main.ts b/src/main.ts index 7512209..d9abcbe 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,5 +1,6 @@ // @ts-nocheck import { EditorView } from '@codemirror/view' +// import { PGlite } from '@electric-sql/pglite' import { Editor, MarkdownView, Notice, Plugin, TFile } from 'obsidian' import { ApplyView } from './ApplyView' @@ -25,8 +26,8 @@ import { InfioSettings, parseInfioSettings, } from './types/settings' -import './utils/path' import { getMentionableBlockData } from './utils/obsidian' +import './utils/path' // Remember to rename these classes and interfaces! 
export default class InfioPlugin extends Plugin { @@ -41,7 +42,7 @@ export default class InfioPlugin extends Plugin { inlineEdit: InlineEdit | null = null private dbManagerInitPromise: Promise | null = null private ragEngineInitPromise: Promise | null = null - + // private pg: PGlite | null = null async onload() { await this.loadSettings() @@ -49,6 +50,9 @@ export default class InfioPlugin extends Plugin { this.settingTab = new InfioSettingTab(this.app, this) this.addSettingTab(this.settingTab) + // create and init pglite db + // this.pg = await createAndInitDb() + // This creates an icon in the left ribbon. this.addRibbonIcon('wand-sparkles', 'Open infio copilot', () => this.openChatView(), @@ -120,6 +124,17 @@ export default class InfioPlugin extends Plugin { this.app.metadataCache.on("changed", (file: TFile) => { if (file) { eventListener.handleFileChange(file); + console.log("file changed: filename: ", file.name); + this.ragEngine?.updateFileIndex(file); + } + }) + ); + + this.registerEvent( + this.app.metadataCache.on("deleted", (file: TFile) => { + if (file) { + console.log("file deleted: filename: ", file.name) + this.ragEngine?.deleteFileIndex(file); } }) ); @@ -322,7 +337,7 @@ export default class InfioPlugin extends Plugin { } onunload() { - this.dbManager?.cleanup() + // this.dbManager?.cleanup() this.dbManager = null } diff --git a/src/pgworker/index.ts b/src/pgworker/index.ts new file mode 100644 index 0000000..901bb28 --- /dev/null +++ b/src/pgworker/index.ts @@ -0,0 +1,19 @@ +import { live } from '@electric-sql/pglite/live'; +import { PGliteWorker } from '@electric-sql/pglite/worker'; + +import PGWorker from './pglite.worker'; + +export const createAndInitDb = async () => { + const worker = new PGWorker(); + + const pg = await PGliteWorker.create( + worker, + { + extensions: { + live, + }, + }, + ) + console.log('PGlite DB created') + return pg +} diff --git a/src/pgworker/pglite.worker.ts b/src/pgworker/pglite.worker.ts new file mode 100644 index 0000000..0c8c0fe --- /dev/null +++ b/src/pgworker/pglite.worker.ts @@ -0,0 +1,78 @@ +// @ts-nocheck +import { PGlite } from '@electric-sql/pglite' + +import { PGliteWorkerOptions, worker } from '@electric-sql/pglite/worker' + +import { pgliteResources } from '../database/pglite-resources' +import { migrations } from '../database/sql' + +export { } + +const loadPGliteResources = async (): Promise<{ + fsBundle: Blob + wasmModule: WebAssembly.Module + vectorExtensionBundlePath: URL +}> => { + try { + // Convert base64 to binary data + const wasmBinary = Buffer.from(pgliteResources.wasmBase64, 'base64') + const dataBinary = Buffer.from(pgliteResources.dataBase64, 'base64') + const vectorBinary = Buffer.from(pgliteResources.vectorBase64, 'base64') + + // Create blobs from binary data + const fsBundle = new Blob([dataBinary], { + type: 'application/octet-stream', + }) + const wasmModule = await WebAssembly.compile(wasmBinary) + + // Create a blob URL for the vector extension + const vectorBlob = new Blob([vectorBinary], { + type: 'application/gzip', + }) + const vectorExtensionBundlePath = URL.createObjectURL(vectorBlob) + + return { + fsBundle, + wasmModule, + vectorExtensionBundlePath: new URL(vectorExtensionBundlePath), + } + } catch (error) { + console.error('Error loading PGlite resources:', error) + throw error + } +} + +worker({ + async init(options: PGliteWorkerOptions) { + let db: PGlite; + try { + const { fsBundle, wasmModule, vectorExtensionBundlePath } = + await loadPGliteResources() + + db = await 
PGlite.create('idb://infio-db', { + relaxedDurability: true, + fsBundle: fsBundle, + wasmModule: wasmModule, + ...options, + extensions: { + ...options.extensions, + vector: vectorExtensionBundlePath, + }, + }) + } catch (error) { + console.error('Error creating PGlite instance:', error) + throw error + } + + // Execute SQL migrations + for (const [_key, migration] of Object.entries(migrations)) { + // Split SQL into individual commands and execute them one by one + const commands = migration.sql.split('\n\n').filter(cmd => cmd.trim()); + for (const command of commands) { + await db.exec(command); + } + } + + return db + }, +}) diff --git a/src/pgworker/worker.d.ts b/src/pgworker/worker.d.ts new file mode 100644 index 0000000..42d7eb9 --- /dev/null +++ b/src/pgworker/worker.d.ts @@ -0,0 +1,4 @@ +declare module 'pglite.worker' { + const WorkerFactory: new () => Worker; + export default WorkerFactory; +} \ No newline at end of file diff --git a/src/settings/components/ProviderModelsPicker.tsx b/src/settings/components/ProviderModelsPicker.tsx index 9f394ed..3303520 100644 --- a/src/settings/components/ProviderModelsPicker.tsx +++ b/src/settings/components/ProviderModelsPicker.tsx @@ -234,7 +234,7 @@ export const ComboBoxComponent: React.FC = ({
- [{modelProvider}]{modelId} + [{modelProvider}] {modelId}
\n${currentFileContent}\n` : undefined diff --git a/src/utils/web-search.ts b/src/utils/web-search.ts index 0e3a9fa..6928831 100644 --- a/src/utils/web-search.ts +++ b/src/utils/web-search.ts @@ -20,8 +20,6 @@ export async function webSearch(query: string, serperApiKey: string): Promise { const url = `${SERPER_BASE_URL}?q=${encodeURIComponent(query)}&engine=google&api_key=${serperApiKey}&num=20`; - console.log(url) - https.get(url, (res: any) => { let data = ''; @@ -31,7 +29,6 @@ export async function webSearch(query: string, serperApiKey: string): Promise { try { - console.log(data) let parsedData: SearchResponse; try { parsedData = JSON.parse(data); @@ -90,8 +87,6 @@ export async function fetchUrlsContent(urls: string[], apiKey: string): Promise< } }); - console.log('fetchUrlsContent', results); - Promise.all(results).then((texts) => { resolve(texts.join('\n\n')); }).catch((error) => { @@ -123,8 +118,6 @@ function fetchJina(url: string, apiKey: string): Promise { }); res.on('end', () => { - console.log(data); - try { // check if there is an error response const response = JSON.parse(data);
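
Build wiring note: the worker only works inside Obsidian's single-file bundle because `esbuild-plugin-inline-worker` compiles `src/pgworker/pglite.worker.ts` separately and inlines it, so `import PGWorker from './pglite.worker'` resolves to a Worker constructor (typed by `src/pgworker/worker.d.ts`). The excerpt below restates the `esbuild.config.mjs` change from this patch; the inline comment in the diff, 继承主配置, translates to "inherit the main config", i.e. the worker bundle gets the same `process` stub as the main build.

```js
// esbuild.config.mjs (excerpt of the change in this patch)
import esbuild from 'esbuild'
import inlineWorkerPlugin from 'esbuild-plugin-inline-worker'

const context = await esbuild.context({
  entryPoints: ['src/main.ts'],
  bundle: true,
  plugins: [
    inlineWorkerPlugin({
      define: {
        process: '{}', // inherit the main config: replace `process` inside the worker bundle
      },
    }),
  ],
  // 'fs' is newly externalized alongside 'obsidian', 'electron' and the other entries
  external: ['fs', 'obsidian', 'electron'],
})
```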
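
For readability (the flattened diff above is hard to follow), here is a condensed recap of the new worker entry point from `src/pgworker/pglite.worker.ts`: the worker owns the real PGlite instance, persists it to IndexedDB instead of dumping a gzip of the data directory into the vault (which is why `DBManager.save()` becomes a no-op), and runs the SQL migrations that previously ran in `DBManager.migrateDatabase`. The base64 resource loading (`fsBundle`, `wasmModule`, vector extension) and the error handling from the patch are omitted here.

```ts
// src/pgworker/pglite.worker.ts, condensed from the patch
import { PGlite } from '@electric-sql/pglite'
import { PGliteWorkerOptions, worker } from '@electric-sql/pglite/worker'

import { migrations } from '../database/sql'

worker({
  async init(options: PGliteWorkerOptions) {
    // Data lives in IndexedDB; relaxedDurability trades sync strictness for speed.
    const db = await PGlite.create('idb://infio-db', {
      relaxedDurability: true,
      ...options,
      // The patch also passes fsBundle, wasmModule and the pgvector extension
      // here, decoded from the bundled base64 pglite-resources.
    })

    // Apply schema migrations once, command by command, exactly as the patch does.
    for (const migration of Object.values(migrations)) {
      const commands = migration.sql.split('\n\n').filter((cmd) => cmd.trim())
      for (const command of commands) {
        await db.exec(command)
      }
    }

    return db
  },
})
```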
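
The `live` extension is registered twice on purpose: inside the worker options so the underlying PGlite supports live queries, and again in `PGliteWorker.create` so the main-thread proxy exposes `pg.live`. The patch itself does not show a subscriber, so the sketch below is a hypothetical usage example; the table name, column names and row shape are illustrative assumptions, not taken from the codebase.

```ts
import { createAndInitDb } from './pgworker'

// Hypothetical consumer of the worker-backed client. Everything about the
// query below (table name, columns, row shape) is an illustrative assumption.
const pg = await createAndInitDb()

const liveConversations = await pg.live.query<{ id: string; title: string }>(
  'SELECT id, title FROM conversations ORDER BY updated_at DESC',
  [],
  (result) => {
    // Called with fresh rows whenever the underlying data changes.
    console.log('conversations updated:', result.rows)
  },
)

// When the subscriber goes away (e.g. a view unmounts):
await liveConversations.unsubscribe()
```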