diff --git a/.cspell.json b/.cspell.json index 213394b..bbe91d8 100644 --- a/.cspell.json +++ b/.cspell.json @@ -1,10 +1,37 @@ { "$schema": "https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json", "version": "0.2", - "ignorePaths": ["**/*.json", "**/*.css", "node_modules", "**/*.log", "./src/adapters/supabase/**/**.ts"], + "ignorePaths": ["**/*.json", "**/*.css", "node_modules", "**/*.log", "./src/adapters/supabase/**/**.ts", "/supabase/*"], "useGitignore": true, "language": "en", - "words": ["Nektos", "dataurl", "devpool", "outdir", "servedir", "Supabase", "SUPABASE", "typebox", "ubiquibot", "Smee"], + "words": [ + "mswjs", + "Nektos", + "dataurl", + "devpool", + "outdir", + "servedir", + "Supabase", + "SUPABASE", + "typebox", + "ubiquibot", + "Smee", + "sonarjs", + "knip", + "mischeck", + "convo", + "ubqbot", + "behaviour", + "voyageai", + "Rerankers", + "reranker", + "rerank", + "reranked", + "mixtral", + "nemo", + "Reranking", + "mistralai" + ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], "ignoreRegExpList": ["[0-9a-fA-F]{6}"] diff --git a/.dev.vars.example b/.dev.vars.example index e49d79a..e39f3dd 100644 --- a/.dev.vars.example +++ b/.dev.vars.example @@ -1 +1,4 @@ -MY_SECRET="MY_SECRET" +OPENAI_API_KEY="" +SUPABASE_URL="" +SUPABASE_KEY="" +VOYAGEAI_API_KEY="" \ No newline at end of file diff --git a/.env.example b/.env.example deleted file mode 100644 index e49d79a..0000000 --- a/.env.example +++ /dev/null @@ -1 +0,0 @@ -MY_SECRET="MY_SECRET" diff --git a/.github/knip.ts b/.github/knip.ts index 17857ad..2ca6fdf 100644 --- a/.github/knip.ts +++ b/.github/knip.ts @@ -6,7 +6,7 @@ const config: KnipConfig = { ignore: ["src/types/config.ts", "**/__mocks__/**", "**/__fixtures__/**"], ignoreExportsUsedInFile: true, // eslint can also be safely ignored as per the docs: 
https://knip.dev/guides/handling-issues#eslint--jest - ignoreDependencies: ["eslint-config-prettier", "eslint-plugin-prettier", "@mswjs/data"], + ignoreDependencies: ["eslint-config-prettier", "eslint-plugin-prettier", "tsx"], eslint: true, }; diff --git a/.github/workflows/compute.yml b/.github/workflows/compute.yml deleted file mode 100644 index 3d204b1..0000000 --- a/.github/workflows/compute.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: "the name of the plugin" - -on: - workflow_dispatch: - inputs: - stateId: - description: "State Id" - eventName: - description: "Event Name" - eventPayload: - description: "Event Payload" - settings: - description: "Settings" - authToken: - description: "Auth Token" - ref: - description: "Ref" - -jobs: - compute: - name: "plugin name" - runs-on: ubuntu-latest - permissions: write-all - env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL }} - SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} - - steps: - - uses: actions/checkout@v4 - - - name: setup node - uses: actions/setup-node@v4 - with: - node-version: "20.10.0" - - - name: install dependencies - run: yarn - - - name: execute directive - run: npx tsx ./src/main.ts - id: plugin-name - env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL }} - SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} diff --git a/.github/workflows/worker-delete.yml b/.github/workflows/worker-delete.yml new file mode 100644 index 0000000..f715a20 --- /dev/null +++ b/.github/workflows/worker-delete.yml @@ -0,0 +1,44 @@ +name: Delete Deployment + +on: + delete: + +jobs: + delete: + runs-on: ubuntu-latest + name: Delete Deployment + steps: + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20.10.0" + + - name: Enable corepack + run: corepack enable + + - uses: actions/checkout@v4 + + - name: Get Deleted Branch Name + id: get_branch + run: | + branch_name=$(echo '${{ github.event.ref }}' | sed 's#refs/heads/##' | sed 's#[^a-zA-Z0-9]#-#g') + echo "branch_name=$branch_name" >> $GITHUB_ENV + - name: Retrieve and 
Construct Full Worker Name + id: construct_worker_name + run: | + base_name=$(grep '^name = ' wrangler.toml | head -n 1 | sed 's/^name = "\(.*\)"$/\1/') + full_worker_name="${base_name}-${{ env.branch_name }}" + # Make sure that it doesnt exceed 63 characters or it will break RFC 1035 + full_worker_name=$(echo "${full_worker_name}" | cut -c 1-63) + echo "full_worker_name=$full_worker_name" >> $GITHUB_ENV + - name: Delete Deployment with Wrangler + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: delete --name ${{ env.full_worker_name }} + + - name: Output Deletion Result + run: | + echo "### Deployment URL" >> $GITHUB_STEP_SUMMARY + echo 'Deployment `${{ env.full_worker_name }}` has been deleted.' >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/worker-deploy.yml b/.github/workflows/worker-deploy.yml new file mode 100644 index 0000000..cba478c --- /dev/null +++ b/.github/workflows/worker-deploy.yml @@ -0,0 +1,48 @@ +name: Deploy Worker + +on: + push: + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + name: Deploy + steps: + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20.10.0" + + - name: Enable corepack + run: corepack enable + + - uses: actions/checkout@v4 + + - name: Update wrangler.toml Name Field + run: | + branch_name=$(echo '${{ github.event.ref }}' | sed 's#refs/heads/##' | sed 's#[^a-zA-Z0-9]#-#g') + # Extract base name from wrangler.toml + base_name=$(grep '^name = ' wrangler.toml | head -n 1 | sed 's/^name = "\(.*\)"$/\1/') + # Concatenate branch name with base name + new_name="${base_name}-${branch_name}" + # Truncate the new name to 63 characters for RFC 1035 + new_name=$(echo "$new_name" | cut -c 1-63) + # Update the wrangler.toml file + sed -i '0,/^name = .*/{s/^name = .*/name = "'"$new_name"'"/}' wrangler.toml + echo "Updated wrangler.toml name to: $new_name" + - name: Deploy with Wrangler + id: 
wrangler_deploy + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + secrets: | + OPENAI_API_KEY + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + + - name: Write Deployment URL to Summary + run: | + echo "### Deployment URL" >> $GITHUB_STEP_SUMMARY + echo "${{ steps.wrangler_deploy.outputs.deployment-url }}" >> $GITHUB_STEP_SUMMARY diff --git a/README.md b/README.md index 4dcb9f1..ae289dc 100644 --- a/README.md +++ b/README.md @@ -1,93 +1,41 @@ -# `@ubiquibot/plugin-template` +# `@ubiquity-os/command-ask` -## Prerequisites +This is a highly context aware GitHub organization integrated bot that uses the OpenAI GPT-4o model to provide highly relevant answers to questions and queries in GitHub issues and pull requests. -- A good understanding of how the [kernel](https://github.com/ubiquity/ubiquibot-kernel) works and how to interact with it. -- A basic understanding of the Ubiquibot configuration and how to define your plugin's settings. +## Usage -## Getting Started +In any repository where your Ubiquity OS app is installed, both issues and pull requests alike, you simply mention `@UbiquityOS` with your question or query and using the latest OpenAi GPT-4o model, the bot will provide you with a highly relevant answer. -1. Create a new repository using this template. -2. Clone the repository to your local machine. -3. Install the dependencies preferably using `yarn` or `bun`. +## How it works -## Creating a new plugin +With it's huge context window, we are able to feed the entire conversational history to the model which we obtain by recursively fetching any referenced issues or pull requests from the chat history. This allows the model to have a very deep understanding of the current scope and provide highly relevant answers. 
-- If your plugin is to be used as a slash command which should have faster response times as opposed to longer running GitHub action tasks, you should use the `worker` type. +As it receives everything from discussions to pull request diffs and review comments, it is a highly versatile and capable bot that can assist in a wide range of scenarios. -1. Ensure you understand and have setup the [kernel](https://github.com/ubiquity/ubiquibot-kernel). -2. Update [compute.yml](./.github/workflows/compute.yml) with your plugin's name and update the `id`. -3. Update [context.ts](./src/types/context.ts) with the events that your plugin will fire on. -4. Update [plugin-inputs.ts](./src/types/plugin-inputs.ts) to match the `with:` settings in your org or repo level configuration. +## Installation -- Your plugin config should look similar to this: +`ubiquibot-config.yml`: ```yml -- plugin: /:compute.yml@development - name: plugin-name - id: plugin-name-command - description: "Plugin description" # small description of what the plugin does - command: "" # if you are creating a plugin with a slash command - example: "" # how to invoke the slash command - with: # these are the example settings, the kernel passes these to the plugin. - disabledCommands: [] - timers: - reviewDelayTolerance: 86000 - taskStaleTimeoutDuration: 2580000 - miscellaneous: - maxConcurrentTasks: 3 - labels: - time: [] - priority: [] +plugins: + - uses: + - plugin: http://localhost:4000 + with: + model: "" + openAiBaseUrl: "" ``` -###### At this stage, your plugin will fire on your defined events with the required settings passed in from the kernel. You can now start writing your plugin's logic. +`.dev.vars` (for local testing): -5. Start building your plugin by adding your logic to the [plugin.ts](./src/plugin.ts) file. +```sh +# OpenAI API key +OPENAI_API_KEY=your-api-key +UBIQUITY_OS_APP_NAME="UbiquityOS" -## Testing a plugin - -### Worker Plugins - -- `yarn/bun worker` - to run the worker locally. 
-- To trigger the worker, `POST` requests to http://localhost:4000/ with an event payload similar to: - -```ts -await fetch("http://localhost:4000/", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - stateId: "", - eventName: "", - eventPayload: "", - settings: "", - ref: "", - authToken: "", - }), -}); ``` -A full example can be found [here](https://github.com/ubiquibot/assistive-pricing/blob/623ea3f950f04842f2d003bda3fc7b7684e41378/tests/http/request.http). - -### Action Plugins - -- Ensure the kernel is running and listening for events. -- Fire an event in/to the repo where the kernel is installed. This can be done in a number of ways, the easiest being via the GitHub UI or using the GitHub API, such as posting a comment, opening an issue, etc in the org/repo where the kernel is installed. -- The kernel will process the event and dispatch it using the settings defined in your `.ubiquibot-config.yml`. -- The `compute.yml` workflow will run and execute your plugin's logic. -- You can view the logs in the Actions tab of your repo. +## Testing -[Nektos Act](https://github.com/nektos/act) - a tool for running GitHub Actions locally. - -## More information - -- [Full Ubiquibot Configuration](https://github.com/ubiquity/ubiquibot/blob/0fde7551585499b1e0618ec8ea5e826f11271c9c/src/types/configuration-types.ts#L62) - helpful for defining your plugin's settings as they are strongly typed and will be validated by the kernel. -- [Ubiquibot V1](https://github.com/ubiquity/ubiquibot) - helpful for porting V1 functionality to V2, helper/utility functions, types, etc. Everything is based on the V1 codebase but with a more modular approach. When using V1 code, keep in mind that most all code will need refactored to work with the new V2 architecture. 
- -## Examples - -- [Start/Stop Slash Command](https://github.com/ubq-testing/start-stop-module) - simple -- [Assistive Pricing Plugin](https://github.com/ubiquibot/assistive-pricing) - complex -- [Conversation Rewards](https://github.com/ubiquibot/conversation-rewards) - really complex +```sh +yarn test +``` diff --git a/eslint.config.mjs b/eslint.config.mjs index e53d263..c714515 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -9,7 +9,7 @@ export default tsEslint.config({ "@typescript-eslint": tsEslint.plugin, "check-file": checkFile, }, - ignores: [".github/knip.ts"], + ignores: [".github/knip.ts", ".wrangler/**/*.ts", ".wrangler/**/*.js"], extends: [eslint.configs.recommended, ...tsEslint.configs.recommended, sonarjs.configs.recommended], languageOptions: { parser: tsEslint.parser, diff --git a/manifest.json b/manifest.json new file mode 100644 index 0000000..5d6ce58 --- /dev/null +++ b/manifest.json @@ -0,0 +1,5 @@ +{ + "name": "command-ask", + "description": "A highly context aware organization integrated chatbot", + "ubiquity:listeners": ["issue_comment.created"] +} diff --git a/package.json b/package.json index cbe5fb0..31e77a6 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { - "name": "plugin-template", + "name": "@ubiquity-os/command-ask", "version": "1.0.0", - "description": "Ubiquibot plugin template repository with TypeScript support.", + "description": "A highly context aware organization integrated chatbot", "author": "Ubiquity DAO", "license": "MIT", "main": "src/worker.ts", @@ -17,7 +17,7 @@ "knip-ci": "knip --no-exit-code --reporter json --config .github/knip.ts", "prepare": "husky install", "test": "jest --setupFiles dotenv/config --coverage", - "worker": "wrangler dev --env dev --port 4000" + "worker": "wrangler dev --env dev --port 5000" }, "keywords": [ "typescript", @@ -27,14 +27,16 @@ "open-source" ], "dependencies": { - "@actions/core": "1.10.1", - "@actions/github": "6.0.0", + "@mswjs/data": "^0.16.2", 
"@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", - "@supabase/supabase-js": "2.43.5", - "dotenv": "16.4.5", - "typebox-validators": "0.3.5" + "@supabase/supabase-js": "^2.45.4", + "@ubiquity-dao/ubiquibot-logger": "^1.3.0", + "dotenv": "^16.4.5", + "openai": "^4.63.0", + "typebox-validators": "0.3.5", + "voyageai": "^0.0.1-5" }, "devDependencies": { "@commitlint/cli": "19.3.0", @@ -44,7 +46,7 @@ "@cspell/dict-typescript": "3.1.5", "@eslint/js": "9.5.0", "@jest/globals": "29.7.0", - "@mswjs/data": "0.16.1", + "@types/jest": "^29.5.12", "@types/node": "20.14.5", "cspell": "8.9.0", "eslint": "9.5.0", diff --git a/src/adapters/index.ts b/src/adapters/index.ts index 23fb4b3..5040ebd 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -1,17 +1,31 @@ import { SupabaseClient } from "@supabase/supabase-js"; -import { Context } from "../types/context"; -import { Access } from "./supabase/helpers/access"; -import { User } from "./supabase/helpers/user"; -import { Label } from "./supabase/helpers/label"; -import { Super } from "./supabase/helpers/supabase"; +import { Context } from "../types"; +import { Comment } from "./supabase/helpers/comment"; +import { SuperSupabase } from "./supabase/helpers/supabase"; +import { Embedding as VoyageEmbedding } from "./voyage/helpers/embedding"; +import { SuperVoyage } from "./voyage/helpers/voyage"; +import { VoyageAIClient } from "voyageai"; +import { Issue } from "./supabase/helpers/issues"; +import { SuperOpenAi } from "./openai/helpers/openai"; +import OpenAI from "openai"; +import { Completions } from "./openai/helpers/completions"; +import { Rerankers } from "./voyage/helpers/rerankers"; -export function createAdapters(supabaseClient: SupabaseClient, context: Context) { +export function createAdapters(supabaseClient: SupabaseClient, voyage: VoyageAIClient, openai: OpenAI, context: Context) { return { supabase: { - access: new Access(supabaseClient, context), - user: new 
User(supabaseClient, context), - label: new Label(supabaseClient, context), - super: new Super(supabaseClient, context), + comment: new Comment(supabaseClient, context), + issue: new Issue(supabaseClient, context), + super: new SuperSupabase(supabaseClient, context), + }, + voyage: { + reranker: new Rerankers(voyage, context), + embedding: new VoyageEmbedding(voyage, context), + super: new SuperVoyage(voyage, context), + }, + openai: { + completions: new Completions(openai, context), + super: new SuperOpenAi(openai, context), }, }; } diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts new file mode 100644 index 0000000..f68f305 --- /dev/null +++ b/src/adapters/openai/helpers/completions.ts @@ -0,0 +1,79 @@ +import OpenAI from "openai"; +import { Context } from "../../../types"; +import { SuperOpenAi } from "./openai"; +const MAX_TOKENS = 7000; + +export interface CompletionsType { + answer: string; + tokenUsage: { + input: number; + output: number; + total: number; + }; +} + +export class Completions extends SuperOpenAi { + protected context: Context; + + constructor(client: OpenAI, context: Context) { + super(client, context); + this.context = context; + } + + async createCompletion( + prompt: string, + model: string = "o1-mini", + additionalContext: string[], + localContext: string[], + groundTruths: string[], + botName: string + ): Promise { + const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ + model: model, + messages: [ + { + role: "system", + content: [ + { + type: "text", + text: + "You Must obey the following ground truths: [" + + groundTruths.join(":") + + "]\n" + + "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. 
Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." 
+ + "Your name is : " + + botName + + "\n" + + "Primary Context: " + + additionalContext.join("\n") + + "\nLocal Context: " + + localContext.join("\n"), + }, + ], + }, + { + role: "user", + content: [ + { + type: "text", + text: prompt, + }, + ], + }, + ], + temperature: 0.2, + max_tokens: MAX_TOKENS, + top_p: 0.5, + frequency_penalty: 0, + presence_penalty: 0, + response_format: { + type: "text", + }, + }); + const answer = res.choices[0].message; + if (answer && answer.content && res.usage) { + return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } }; + } + return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } }; + } +} diff --git a/src/adapters/openai/helpers/openai.ts b/src/adapters/openai/helpers/openai.ts new file mode 100644 index 0000000..11457c6 --- /dev/null +++ b/src/adapters/openai/helpers/openai.ts @@ -0,0 +1,11 @@ +import { OpenAI } from "openai"; +import { Context } from "../../../types/context"; + +export class SuperOpenAi { + protected client: OpenAI; + protected context: Context; + constructor(client: OpenAI, context: Context) { + this.client = client; + this.context = context; + } +} diff --git a/src/adapters/supabase/helpers/access.ts b/src/adapters/supabase/helpers/access.ts deleted file mode 100644 index dc32281..0000000 --- a/src/adapters/supabase/helpers/access.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Super } from "./supabase"; -import { Context } from "../../../types/context"; - -export class Access extends Super { - constructor(supabase: SupabaseClient, context: Context) { - super(supabase, context); - } - - public async getAccess(userId: number, repositoryId: number) { - const { data, error } = await this.supabase - .from("access") - .select("*") - .filter("user_id", "eq", userId) - .filter("repository_id", "eq", repositoryId) - .limit(1) - .maybeSingle(); - - if (error) { - 
this.context.logger.fatal(error.message, error); - throw new Error(error.message); - } - return data; - } - - public async setAccess(userId: number, repositoryId: number, labels: string[]) { - if (!labels.length) { - return this.clearAccess(userId, repositoryId); - } - const { data, error } = await this.supabase - .from("access") - .upsert({ - user_id: userId, - repository_id: repositoryId, - labels: labels, - }) - .select() - .maybeSingle(); - - if (error) throw new Error(error.message); - return data; - } - - public async clearAccess(userId: number, repositoryId: number): Promise { - const { data, error } = await this.supabase.from("access").delete().filter("user_id", "eq", userId).filter("repository_id", "eq", repositoryId); - if (error) throw new Error(error.message); - return data; - } -} diff --git a/src/adapters/supabase/helpers/comment.ts b/src/adapters/supabase/helpers/comment.ts new file mode 100644 index 0000000..d09f24a --- /dev/null +++ b/src/adapters/supabase/helpers/comment.ts @@ -0,0 +1,49 @@ +import { SupabaseClient } from "@supabase/supabase-js"; +import { SuperSupabase } from "./supabase"; +import { Context } from "../../../types/context"; + +export interface CommentType { + id: string; + plaintext: string; + markdown?: string; + author_id: number; + created_at: string; + modified_at: string; + embedding: number[]; +} + +export interface CommentSimilaritySearchResult { + comment_id: string; + comment_plaintext: string; + comment_issue_id: string; + similarity: number; + text_similarity: number; +} + +export class Comment extends SuperSupabase { + constructor(supabase: SupabaseClient, context: Context) { + super(supabase, context); + } + async getComment(commentNodeId: string): Promise { + const { data, error } = await this.supabase.from("issue_comments").select("*").eq("id", commentNodeId); + if (error) { + this.context.logger.error("Error getting comment", error); + } + return data; + } + + async findSimilarComments(query: string, threshold: 
number, currentId: string): Promise { + const embedding = await this.context.adapters.voyage.embedding.createEmbedding({ text: query, prompt: "This is a query for the stored documents:" }); + const { data, error } = await this.supabase.rpc("find_similar_comments", { + current_id: currentId, + query_text: query, + query_embedding: embedding, + threshold: threshold, + max_results: 10, + }); + if (error) { + this.context.logger.error("Error finding similar comments", error); + } + return data; + } +} diff --git a/src/adapters/supabase/helpers/issues.ts b/src/adapters/supabase/helpers/issues.ts new file mode 100644 index 0000000..142ef02 --- /dev/null +++ b/src/adapters/supabase/helpers/issues.ts @@ -0,0 +1,50 @@ +import { SupabaseClient } from "@supabase/supabase-js"; +import { SuperSupabase } from "./supabase"; +import { Context } from "../../../types/context"; + +export interface IssueSimilaritySearchResult { + issue_id: string; + issue_plaintext: string; + similarity: number; + text_similarity: number; +} + +export interface IssueType { + id: string; + markdown?: string; + plaintext?: string; + payload?: Record; + author_id: number; + created_at: string; + modified_at: string; + embedding: number[]; +} + +export class Issue extends SuperSupabase { + constructor(supabase: SupabaseClient, context: Context) { + super(supabase, context); + } + async getIssue(issueNodeId: string): Promise { + const { data, error } = await this.supabase.from("issues").select("*").eq("id", issueNodeId).returns(); + if (error) { + this.context.logger.error("Error getting issue", error); + return null; + } + return data; + } + async findSimilarIssues(plaintext: string, threshold: number, currentId: string): Promise { + const embedding = await this.context.adapters.voyage.embedding.createEmbedding({ text: plaintext, prompt: "This is a query for the stored documents:" }); + const { data, error } = await this.supabase.rpc("find_similar_issue_ftse", { + current_id: currentId, + query_text: 
plaintext, + query_embedding: embedding, + threshold: threshold, + max_results: 10, + }); + if (error) { + this.context.logger.error("Error finding similar issues", error); + return []; + } + return data; + } +} diff --git a/src/adapters/supabase/helpers/label.ts b/src/adapters/supabase/helpers/label.ts deleted file mode 100644 index 77e0288..0000000 --- a/src/adapters/supabase/helpers/label.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Super } from "./supabase"; -import { Context } from "../../../types/context"; - -export class Label extends Super { - constructor(supabase: SupabaseClient, context: Context) { - super(supabase, context); - } - - async saveLabelChange({ - previousLabel, - currentLabel, - authorized, - userId, - repositoryId, - }: { - previousLabel: string; - currentLabel: string; - authorized: boolean; - userId: number; - repositoryId: number; - }) { - const { data, error } = await this.supabase - .from("labels") - .insert({ - label_from: previousLabel, - label_to: currentLabel, - authorized: authorized, - user_id: userId, - repository_id: repositoryId, - }) - .select() - .single(); - - if (error) throw new Error(error.message); - return data; - } - - async getLabelChanges(repositoryNodeId: string) { - const { data, error } = await this.supabase.from("labels").select("*").eq("repository_id", repositoryNodeId).eq("authorized", false); - - if (error) throw new Error(error.message); - return data; - } - - async approveLabelChange(id: number): Promise { - const { data, error } = await this.supabase.from("labels").update({ authorized: true }).eq("id", id); - - if (error) throw new Error(error.message); - return data; - } -} diff --git a/src/adapters/supabase/helpers/supabase.ts b/src/adapters/supabase/helpers/supabase.ts index 7a13b85..34e845c 100644 --- a/src/adapters/supabase/helpers/supabase.ts +++ b/src/adapters/supabase/helpers/supabase.ts @@ -1,7 +1,7 @@ import { SupabaseClient } from 
"@supabase/supabase-js"; import { Context } from "../../../types/context"; -export class Super { +export class SuperSupabase { protected supabase: SupabaseClient; protected context: Context; diff --git a/src/adapters/supabase/helpers/user.ts b/src/adapters/supabase/helpers/user.ts deleted file mode 100644 index fa8b687..0000000 --- a/src/adapters/supabase/helpers/user.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Super } from "./supabase"; -import { Context } from "../../../types/context"; - -type Wallet = { - address: string; -}; - -export class User extends Super { - user_id: string | undefined; - comment_id: string | undefined; - issue_id: string | undefined; - repository_id: string | undefined; - node_id: string | undefined; - node_type: string | undefined; - - constructor(supabase: SupabaseClient, context: Context) { - super(supabase, context); - } - - async getUserById(userId: number, issueNumber: number) { - const { data, error } = await this.supabase.from("users").select("*").eq("id", userId).single(); - if (error) { - console.error(FAILED_TO_GET_USER, { userId, error, issueNumber }); - return null; - } - - console.info(SUCCESSFULLY_FETCHED_USER, { userId, issueNumber, ...data }); - return data; - } - - async getWalletByUserId(userId: number, issueNumber: number) { - const { data, error }: { data: { wallets: Wallet } | null; error: unknown } = await this.supabase - .from("users") - .select("wallets(*)") - .eq("id", userId) - .single(); - if ((error && !data) || !data?.wallets?.address) { - console.error("No wallet address found", { userId, issueNumber }, true); - throw new Error("No wallet address found"); - } - - console.info("Successfully fetched wallet", { userId, address: data.wallets?.address }); - return data.wallets?.address; - } - - public async getMultiplier(userId: number, repositoryId: number) { - const locationData = await this.getLocationsFromRepo(repositoryId); - if (locationData && 
locationData.length > 0) { - const accessData = await this._getAccessData(locationData, userId); - if (accessData) { - return { - value: accessData.multiplier || null, - reason: accessData.multiplier_reason || null, - }; - } - } - return null; - } - - private async _getAccessData(locationData: { id: number }[], userId: number) { - const locationIdsInCurrentRepository = locationData.map((location) => location.id); - - const { data: accessData, error: accessError } = await this.supabase - .from("access") - .select("multiplier, multiplier_reason") - .in("location_id", locationIdsInCurrentRepository) - .eq("user_id", userId) - .order("id", { ascending: false }) // get the latest one - .maybeSingle(); - if (accessError) throw console.error("Error getting access data", accessError); - return accessData; - } - - public async getLocationsFromRepo(repositoryId: number) { - const { data: locationData, error } = await this.supabase.from("locations").select("id").eq("repository_id", repositoryId); - - if (error) throw console.error("Error getting location data", new Error(error.message)); - return locationData; - } -} - -const FAILED_TO_GET_USER = "Failed to get user"; -const SUCCESSFULLY_FETCHED_USER = "Successfully fetched user"; diff --git a/src/adapters/voyage/helpers/embedding.ts b/src/adapters/voyage/helpers/embedding.ts new file mode 100644 index 0000000..68797e2 --- /dev/null +++ b/src/adapters/voyage/helpers/embedding.ts @@ -0,0 +1,26 @@ +import { VoyageAIClient } from "voyageai"; +import { Context } from "../../../types"; +import { SuperVoyage } from "./voyage"; +const VECTOR_SIZE = 1024; + +export class Embedding extends SuperVoyage { + protected context: Context; + + constructor(client: VoyageAIClient, context: Context) { + super(client, context); + this.context = context; + } + + async createEmbedding(input: { text?: string; prompt?: string } = {}): Promise { + const { text = null, prompt = null } = input; + if (text === null) { + return new 
Array(VECTOR_SIZE).fill(0); + } else { + const response = await this.client.embed({ + input: prompt ? `${prompt} ${text}` : text, + model: "voyage-large-2-instruct", + }); + return (response.data && response.data[0]?.embedding) || []; + } + } +} diff --git a/src/adapters/voyage/helpers/rerankers.ts b/src/adapters/voyage/helpers/rerankers.ts new file mode 100644 index 0000000..9d68aee --- /dev/null +++ b/src/adapters/voyage/helpers/rerankers.ts @@ -0,0 +1,30 @@ +import { VoyageAIClient } from "voyageai"; +import { Context } from "../../../types"; +import { SuperVoyage } from "./voyage"; + +export class Rerankers extends SuperVoyage { + protected context: Context; + + constructor(client: VoyageAIClient, context: Context) { + super(client, context); + this.context = context; + } + + async reRankResults(results: string[], query: string, topK: number = 5): Promise { + let response; + try { + response = await this.client.rerank({ + query, + documents: results, + model: "rerank-2", + returnDocuments: true, + topK, + }); + } catch (e: unknown) { + this.context.logger.error("Reranking failed!", { e }); + return results; + } + const rerankedResults = response.data || []; + return rerankedResults.map((result) => result.document).filter((document): document is string => document !== undefined); + } +} diff --git a/src/adapters/voyage/helpers/voyage.ts b/src/adapters/voyage/helpers/voyage.ts new file mode 100644 index 0000000..c08c0af --- /dev/null +++ b/src/adapters/voyage/helpers/voyage.ts @@ -0,0 +1,12 @@ +import { VoyageAIClient } from "voyageai"; +import { Context } from "../../../types/context"; + +export class SuperVoyage { + protected client: VoyageAIClient; + protected context: Context; + + constructor(client: VoyageAIClient, context: Context) { + this.client = client; + this.context = context; + } +} diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts new file mode 100644 index 0000000..ec4a731 --- /dev/null +++ b/src/handlers/add-comment.ts @@ -0,0 
+1,21 @@ +import { Context } from "../types/context"; + +/** + * Add a comment to an issue + * @param context - The context object containing environment and configuration details + * @param message - The message to add as a comment + */ +export async function addCommentToIssue(context: Context, message: string) { + const { payload } = context; + const issueNumber = payload.issue.number; + try { + await context.octokit.issues.createComment({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + issue_number: issueNumber, + body: message, + }); + } catch (e: unknown) { + context.logger.error("Adding a comment failed!", { e }); + } +} diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts new file mode 100644 index 0000000..30112c3 --- /dev/null +++ b/src/handlers/ask-llm.ts @@ -0,0 +1,73 @@ +import { Context } from "../types"; +import { CompletionsType } from "../adapters/openai/helpers/completions"; +import { CommentSimilaritySearchResult } from "../adapters/supabase/helpers/comment"; +import { IssueSimilaritySearchResult } from "../adapters/supabase/helpers/issues"; +import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; +import { formatChatHistory } from "../helpers/format-chat-history"; +import { optimizeContext } from "../helpers/issue"; + +/** + * Asks a question to GPT and returns the response + * @param context - The context object containing environment and configuration details + * @param question - The question to ask GPT + * @returns The response from GPT + * @throws If no question is provided + */ +export async function askQuestion(context: Context, question: string) { + if (!question) { + throw context.logger.error("No question provided"); + } + const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ + context, + owner: context.payload.repository.owner.login, + repo: context.payload.repository.name, + }); + const formattedChat = await formatChatHistory(context, 
streamlinedComments, specAndBodies); + context.logger.info(`${formattedChat.join("")}`); + return await askGpt(context, question, formattedChat); +} + +/** + * Asks GPT a question and returns the completions + * @param context - The context object containing environment and configuration details + * @param question - The question to ask GPT + * @param formattedChat - The formatted chat history to provide context to GPT + * @returns completions - The completions generated by GPT + **/ +export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { + const { + env: { UBIQUITY_OS_APP_NAME }, + config: { model, similarityThreshold }, + } = context; + let similarComments: CommentSimilaritySearchResult[] = []; + let similarIssues: IssueSimilaritySearchResult[] = []; + try { + similarComments = (await context.adapters.supabase.comment.findSimilarComments(question, 1 - similarityThreshold, "")) || []; + } catch (error) { + context.logger.error(`Error fetching similar comments: ${(error as Error).message}`); + } + try { + similarIssues = (await context.adapters.supabase.issue.findSimilarIssues(question, 1 - similarityThreshold, "")) || []; + } catch (error) { + context.logger.error(`Error fetching similar issues: ${(error as Error).message}`); + } + let similarText = similarComments.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext); + similarText.push(...similarIssues.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext)); + // Remove Null Results (Private Comments) + similarText = similarText.filter((text) => text !== null); + formattedChat = formattedChat.filter((text) => text !== null); + // Optimize the context + formattedChat = optimizeContext(formattedChat); + // ReRank the results based on the question + // const reRankedChat = formattedChat.length > 0 ? 
await context.adapters.voyage.reranker.reRankResults(formattedChat.filter(text => text !== ""), question, 300) : []; + similarText = similarText.filter((text) => text !== ""); + const rerankedText = similarText.length > 0 ? await context.adapters.voyage.reranker.reRankResults(similarText, question) : []; + return context.adapters.openai.completions.createCompletion( + question, + model, + rerankedText, + formattedChat, + ["typescript", "github", "cloudflare worker", "actions", "jest", "supabase", "openai"], + UBIQUITY_OS_APP_NAME + ); +} diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts new file mode 100644 index 0000000..cf12053 --- /dev/null +++ b/src/handlers/comments.ts @@ -0,0 +1,95 @@ +import { splitKey } from "../helpers/issue"; +import { LinkedIssues, SimplifiedComment } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; + +/** + * Get all streamlined comments from linked issues + * @param linkedIssues - The linked issues to get comments from + * @returns The streamlined comments which are grouped by issue key + */ +export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { + const streamlinedComments: Record = {}; + for (const issue of linkedIssues) { + const linkedIssueComments = issue.comments || []; + if (linkedIssueComments.length === 0) continue; + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); + if (!linkedStreamlinedComments) continue; + for (const [key, value] of Object.entries(linkedStreamlinedComments)) { + streamlinedComments[key] = [...(streamlinedComments[key] || []), ...value]; + } + } + return streamlinedComments; +} + +/** + * Create a unique key for an issue based on its URL and optional issue number + * @param issueUrl - The URL of the issue + * @param issue - The optional issue number + * @returns The unique key for the issue + */ +export function createKey(issueUrl: string, issue?: number) { + const urlParts = issueUrl.split("/"); + + let key; 
+ + if (urlParts.length === 7) { + const [, , , issueOrg, issueRepo, , issueNumber] = urlParts; + key = `${issueOrg}/${issueRepo}/${issueNumber}`; + } + + if (urlParts.length === 5) { + const [, , issueOrg, issueRepo] = urlParts; + key = `${issueOrg}/${issueRepo}/${issue}`; + } + + if (urlParts.length === 8) { + const [, , , issueOrg, issueRepo, , , issueNumber] = urlParts; + key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`; + } + + if (urlParts.length === 3) { + const [issueOrg, issueRepo, issueNumber] = urlParts; + key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`; + } + + if (!key) { + throw new Error("Invalid issue url"); + } + + if (key.includes("#")) { + key = key.split("#")[0]; + } + + return key; +} + +/** + * Streamline comments by filtering out bot comments and organizing them by issue key + * @param comments - The comments to streamline + * @returns The streamlined comments grouped by issue key + */ +export function streamlineComments(comments: SimplifiedComment[]) { + const streamlined: Record = {}; + for (const comment of comments) { + const { user, issueUrl: url, body } = comment; + // Skip bot comments + if (user?.type === "Bot") continue; + const key = createKey(url); + const [owner, repo] = splitKey(key); + + if (!streamlined[key]) { + streamlined[key] = []; + } + if (user && body) { + streamlined[key].push({ + user: user.login, + body, + id: parseInt(comment.id, 10), + org: owner, + repo, + issueUrl: url, + }); + } + } + return streamlined; +} diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts new file mode 100644 index 0000000..5c92c63 --- /dev/null +++ b/src/helpers/format-chat-history.ts @@ -0,0 +1,157 @@ +import { Context } from "../types"; +import { StreamlinedComment, StreamlinedComments } from "../types/llm"; +import { createKey, streamlineComments } from "../handlers/comments"; +import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; +import { splitKey } from 
"./issue"; + +/** + * Formats the chat history by combining streamlined comments and specifications or bodies for issues and pull requests. + * + * @param context - The context object containing information about the current GitHub event. + * @param streamlined - A record of streamlined comments for each issue or pull request. + * @param specAndBodies - A record of specifications or bodies for each issue or pull request. + * @returns A promise that resolves to a formatted string representing the chat history. + */ +export async function formatChatHistory( + context: Context, + streamlined: Record, + specAndBodies: Record +): Promise { + const keys = new Set([...Object.keys(streamlined), ...Object.keys(specAndBodies), createKey(context.payload.issue.html_url)]); + const chatHistory = await Promise.all( + Array.from(keys).map(async (key) => { + const isCurrentIssue = key === createKey(context.payload.issue.html_url); + return createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue); + }) + ); + return Array.from(new Set(chatHistory)); +} + +/** + * Generates the correct header string based on the provided parameters. + * + * @param prDiff - The pull request diff string, if available. + * @param issueNumber - The issue number. + * @param isCurrentIssue - A boolean indicating if this is the current issue. + * @param isBody - A boolean indicating if this is for the body of the issue. + * @returns The formatted header string. + */ +function getCorrectHeaderString(prDiff: string | null, issueNumber: number, isCurrentIssue: boolean, isBody: boolean) { + const headerTemplates = { + pull: `Pull #${issueNumber} Request`, + issue: `Issue #${issueNumber} Specification`, + convo: `Issue #${issueNumber} Conversation`, + }; + + const type = prDiff ? "pull" : "issue"; + const context = isCurrentIssue ? "current" : "linked"; + const bodyContext = isBody ? 
"convo" : type; + + return `${context.charAt(0).toUpperCase() + context.slice(1)} ${headerTemplates[bodyContext]}`; +} + +/** + * Creates a context block section for the given issue or pull request. + * + * @param context - The context object containing information about the current GitHub event. + * @param key - The unique key representing the issue or pull request. + * @param streamlined - A record of streamlined comments for each issue or pull request. + * @param specAndBodies - A record of specifications or bodies for each issue or pull request. + * @param isCurrentIssue - A boolean indicating whether the key represents the current issue. + * @returns A formatted string representing the context block section. + */ +async function createContextBlockSection( + context: Context, + key: string, + streamlined: Record, + specAndBodies: Record, + isCurrentIssue: boolean +) { + let comments = streamlined[key]; + if (!comments || comments.length === 0) { + const [owner, repo, number] = splitKey(key); + const { comments: fetchedComments } = await fetchIssueComments({ + context, + owner, + repo, + issueNum: parseInt(number), + }); + comments = streamlineComments(fetchedComments)[key]; + } + const [org, repo, issueNum] = key.split("/"); + const issueNumber = parseInt(issueNum); + if (!issueNumber || isNaN(issueNumber)) { + throw context.logger.error("Issue number is not valid"); + } + const prDiff = await fetchPullRequestDiff(context, org, repo, issueNumber); + const specHeader = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, false); + let specOrBody = specAndBodies[key]; + if (!specOrBody) { + specOrBody = + ( + await fetchIssue({ + context, + owner: org, + repo, + issueNum: issueNumber, + }) + )?.body || "No specification or body available"; + } + const specOrBodyBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader)]; + const header = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, true); + const 
repoString = `${org}/${repo} #${issueNumber}`; + const block = [specOrBodyBlock.join(""), createHeader(header, repoString), createComment({ issueNumber, repo, org, comments }), createFooter(header)]; + if (!prDiff) { + return block.join(""); + } + const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), prDiff, createFooter("Linked Pull Request Code Diff")]; + return block.concat(diffBlock).join(""); +} + +/** + * Creates a header string for the given content and repository string. + * + * @param content - The content to include in the header. + * @param repoString - The repository string to include in the header. + * @returns A formatted header string. + */ +function createHeader(content: string, repoString: string) { + return `=== ${content} === ${repoString} ===\n\n`; +} + +/** + * Creates a footer string for the given content. + * + * @param content - The content to include in the footer. + * @returns A formatted footer string. + */ +function createFooter(content: string) { + return `=== End ${content} ===\n\n`; +} + +/** + * Creates a comment string from the StreamlinedComments object. + * + * @param comment - The StreamlinedComments object. + * @returns A string representing the comments. + */ +function createComment(comment: StreamlinedComments) { + if (!comment.comments) { + return ""; + } + // Remove duplicates + const uniqueComments = comment.comments.filter((c, i, a) => a.findIndex((cc) => cc.id === c.id) === i); + // Format comments + const formattedComments = uniqueComments.map((c) => `${c.id} ${c.user}: ${c.body}\n`); + return formattedComments.join(""); +} + +/** + * Creates a formatted string for the specification or body of an issue. + * + * @param specOrBody - The specification or body content. + * @returns A formatted string representing the specification or body. 
+ */ +function createSpecOrBody(specOrBody: string) { + return `${specOrBody}\n`; +} diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts new file mode 100644 index 0000000..486d83e --- /dev/null +++ b/src/helpers/issue-fetching.ts @@ -0,0 +1,299 @@ +import { createKey, getAllStreamlinedComments } from "../handlers/comments"; +import { Context } from "../types"; +import { IssueWithUser, SimplifiedComment, User } from "../types/github-types"; +import { FetchParams, Issue, Comments, LinkedIssues } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; +import { + dedupeStreamlinedComments, + fetchCodeLinkedFromIssue, + idIssueFromComment, + mergeStreamlinedComments, + pullReadmeFromRepoForIssue, + splitKey, +} from "./issue"; +import { handleIssue, handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; + +/** + * Recursively fetches linked issues and processes them, including fetching comments and specifications. + * + * @param params - The parameters required to fetch the linked issues, including context and other details. + * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. 
+ */ +export async function recursivelyFetchLinkedIssues(params: FetchParams) { + const { linkedIssues, seen, specAndBodies, streamlinedComments } = await fetchLinkedIssues(params); + const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); + await throttlePromises(fetchPromises, 10); + const linkedIssuesKeys = linkedIssues.map((issue) => createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`)); + const specAndBodyKeys = Array.from(new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); + await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); + return { linkedIssues, specAndBodies, streamlinedComments }; +} + +/** + * Fetches linked issues recursively and processes them. + * + * @param params - The parameters required to fetch the linked issues, including context and other details. + * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. 
+ */ +export async function fetchLinkedIssues(params: FetchParams) { + const { comments, issue } = await fetchIssueComments(params); + if (!issue) { + return { streamlinedComments: {}, linkedIssues: [], specAndBodies: {}, seen: new Set() }; + } + if (!issue.body || !issue.html_url) { + throw new Error("Issue body or URL not found"); + } + + if (!params.owner || !params.repo) { + throw new Error("Owner, repo, or issue number not found"); + } + const issueKey = createKey(issue.html_url); + const [owner, repo, issueNumber] = splitKey(issueKey); + const linkedIssues: LinkedIssues[] = [{ body: issue.body, comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; + const specAndBodies: Record = {}; + const seen = new Set([issueKey]); + + comments.push({ + body: issue.body, + user: issue.user as User, + id: issue.id.toString(), + org: params.owner, + repo: params.repo, + issueUrl: issue.html_url, + }); + + //Fetch the README of the repository + try { + const readme = await pullReadmeFromRepoForIssue(params); + if (readme) { + comments.push({ + body: readme, + user: issue.user as User, + id: issue.id.toString(), + org: params.owner, + repo: params.repo, + issueUrl: issue.html_url, + }); + } + } catch (error) { + params.context.logger.error(`Error fetching README`, { + error: error as Error, + owner, + repo, + issue, + }); + } + + for (const comment of comments) { + const foundIssues = idIssueFromComment(comment.body); + const foundCodes = comment.body ? 
await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; + + if (foundIssues) { + for (const linkedIssue of foundIssues) { + const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); + if (seen.has(linkedKey)) continue; + + seen.add(linkedKey); + const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ + context: params.context, + issueNum: linkedIssue.issueNumber, + owner: linkedIssue.owner, + repo: linkedIssue.repo, + }); + + if (!fetchedIssue || !fetchedIssue.body) { + continue; + } + + specAndBodies[linkedKey] = fetchedIssue?.body; + linkedIssue.body = fetchedIssue?.body; + linkedIssue.comments = fetchedComments; + linkedIssues.push(linkedIssue); + } + } + + if (foundCodes) { + for (const code of foundCodes) { + comments.push({ + body: code.body, + user: code.user, + id: code.id, + org: code.org, + repo: code.repo, + issueUrl: code.issueUrl, + }); + } + } + } + + const streamlinedComments = await getAllStreamlinedComments(linkedIssues); + return { streamlinedComments, linkedIssues, specAndBodies, seen }; +} + +/** + * Merges comments and fetches the specification for a linked issue. + * + * @param params - The parameters required to fetch the linked issue, including context and other details. + * @param linkedIssue - The linked issue for which comments and specifications need to be fetched. + * @param streamlinedComments - A record of streamlined comments associated with issues. + * @param specOrBodies - A record of specifications or bodies associated with issues. + * @param seen - A set of issue keys that have already been processed to avoid duplication. 
+ */ +export async function mergeCommentsAndFetchSpec( + params: FetchParams, + linkedIssue: LinkedIssues, + streamlinedComments: Record, + specOrBodies: Record, + seen: Set +) { + if (linkedIssue.comments) { + const streamed = await getAllStreamlinedComments([linkedIssue]); + const merged = mergeStreamlinedComments(streamlinedComments, streamed); + streamlinedComments = { ...streamlinedComments, ...merged }; + } + if (linkedIssue.body) { + await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); + } +} + +/** + * Fetches the diff of a pull request. + * + * @param context - The context containing the octokit instance and logger. + * @param org - The organization or owner of the repository. + * @param repo - The name of the repository. + * @param issue - The pull request number. + * @returns A promise that resolves to the diff of the pull request as a string, or null if an error occurs. + */ +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number): Promise { + const { octokit, logger } = context; + try { + const { data } = await octokit.pulls.get({ + owner: org, + repo, + pull_number: issue, + mediaType: { + format: "diff", + }, + }); + return data as unknown as string; + } catch (error) { + logger.error(`Error fetching pull request diff`, { + error: error as Error, + owner: org, + repo, + pull_number: issue, + }); + return null; + } +} + +/** + * Fetches the details of a pull request. + * + * @param params - The parameters required to fetch the pull request, including context and other details. + * @returns A promise that resolves to the pull request details or null if an error occurs. 
+ */ +export async function fetchIssue(params: FetchParams): Promise { + const { octokit, payload, logger } = params.context; + const { issueNum, owner, repo } = params; + try { + const response = await octokit.rest.issues.get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + return response.data as IssueWithUser; + } catch (error) { + logger.error(`Error fetching issue`, { + error: error as Error, + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + return null; + } +} + +/** + * Fetches the comments for a given issue or pull request. + * + * @param params - The parameters required to fetch the issue comments, including context and other details. + * @returns A promise that resolves to an object containing the issue and its comments. + */ +export async function fetchIssueComments(params: FetchParams) { + const { octokit, payload, logger } = params.context; + const { issueNum, owner, repo } = params; + const issue = await fetchIssue(params); + let comments: Comments = []; + try { + if (issue?.pull_request) { + const response = await octokit.rest.pulls.listReviewComments({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + pull_number: issueNum || payload.issue.number, + }); + comments = response.data; + } else { + const response = await octokit.rest.issues.listComments({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + comments = response.data; + } + } catch (e) { + logger.error(`Error fetching comments `, { + e, + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + comments = []; + } + comments = comments.filter((comment) => 
comment.user?.type !== "Bot") as Comments; + const simplifiedComments = castCommentsToSimplifiedComments(comments, params); + + return { + issue, + comments: simplifiedComments, + }; +} + +/** + * Fetches and handles an issue based on the provided key and parameters. + * + * @param key - The unique key representing the issue in the format "owner/repo/issueNumber". + * @param params - The parameters required to fetch the issue, including context and other details. + * @param streamlinedComments - A record of streamlined comments associated with issues. + * @param seen - A set of issue keys that have already been processed to avoid duplication. + * @returns A promise that resolves to an array of streamlined comments for the specified issue. + */ +export async function fetchAndHandleIssue( + key: string, + params: FetchParams, + streamlinedComments: Record, + seen: Set +): Promise { + const [owner, repo, issueNumber] = splitKey(key); + const issueParams = { ...params, owner, repo, issueNum: parseInt(issueNumber) }; + await handleIssue(issueParams, streamlinedComments, seen); + return streamlinedComments[key] || []; +} + +function castCommentsToSimplifiedComments(comments: Comments, params: FetchParams): SimplifiedComment[] { + if (!comments) { + return []; + } + return comments + .filter((comment) => comment.body !== undefined) + .map((comment) => ({ + id: comment.id.toString(), + org: params.owner || params.context.payload.repository.owner.login, + repo: params.repo || params.context.payload.repository.name, + issueUrl: comment.html_url, + body: comment.body as string, + user: comment.user as User, + url: comment.html_url, + })); +} diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts new file mode 100644 index 0000000..3f44225 --- /dev/null +++ b/src/helpers/issue-handling.ts @@ -0,0 +1,143 @@ +import { createKey } from "../handlers/comments"; +import { FetchParams } from "../types/github-types"; +import { StreamlinedComment } from 
"../types/llm"; +import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; +import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, mergeCommentsAndFetchSpec } from "./issue-fetching"; + +/** + * Handles the processing of an issue. + * + * @param params - The parameters required to fetch and handle issues. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @param alreadySeen - A set of keys that have already been processed to avoid duplication. + * @returns A promise that resolves when the issue has been handled. + */ +export async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen: Set) { + if (alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { + return; + } + const { linkedIssues, seen, specAndBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); + const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); + await throttlePromises(fetchPromises, 10); + return mergeStreamlinedComments(streamlinedComments, streamlined); +} + +/** + * Handles the processing of a specification or body text. + * + * @param params - The parameters required to fetch and handle issues. + * @param specOrBody - The specification or body text to be processed. + * @param specAndBodies - A record of specifications and bodies indexed by keys. + * @param key - The key associated with the current specification or body. + * @param seen - A set of keys that have already been processed to avoid duplication. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @returns A promise that resolves to the updated record of specifications and bodies. 
+ */ +export async function handleSpec( + params: FetchParams, + specOrBody: string, + specAndBodies: Record, + key: string, + seen: Set, + streamlinedComments: Record +) { + specAndBodies[key] = specOrBody; + const otherReferences = idIssueFromComment(specOrBody); + if (otherReferences) { + for (const ref of otherReferences) { + const anotherKey = createKey(ref.url, ref.issueNumber); + if (seen.has(anotherKey)) { + return; + } + seen.add(anotherKey); + const issue = await fetchIssue({ + ...params, + owner: ref.owner, + repo: ref.repo, + issueNum: ref.issueNumber, + }); + if (!issue?.body) { + return; + } + + if (issue?.body) { + specAndBodies[anotherKey] = issue.body; + } + const [owner, repo, issueNum] = splitKey(anotherKey); + if (!streamlinedComments[anotherKey]) { + await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); + await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue?.body, specAndBodies, anotherKey, seen, streamlinedComments); + } + } + } + return specAndBodies; +} + +/** + * Handles the processing of a comment. + * + * @param params - The parameters required to fetch and handle issues. + * @param comment - The comment to be processed. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @param seen - A set of keys that have already been processed to avoid duplication. 
+ */ +export async function handleComment( + params: FetchParams, + comment: StreamlinedComment, + streamlinedComments: Record, + seen: Set +) { + const otherReferences = idIssueFromComment(comment.body); + if (otherReferences) { + for (const ref of otherReferences) { + const key = createKey(ref.url); + const [refOwner, refRepo, refIssueNumber] = splitKey(key); + + if (!streamlinedComments[key]) { + await handleIssue({ ...params, owner: refOwner, repo: refRepo, issueNum: parseInt(refIssueNumber) }, streamlinedComments, seen); + } + } + } +} + +/** + * Handles the processing of specification and body keys. + * + * @param keys - An array of keys representing issues or comments to be processed. + * @param params - The parameters required to fetch and handle issues. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @param seen - A set of keys that have already been processed to avoid duplication. + */ +export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { + const commentProcessingPromises = keys.map(async (key) => { + let comments = streamlinedComments[key]; + if (!comments || comments.length === 0) { + comments = await fetchAndHandleIssue(key, params, streamlinedComments, seen); + } + + for (const comment of comments) { + await handleComment(params, comment, streamlinedComments, seen); + } + }); + await throttlePromises(commentProcessingPromises, 10); +} + +/** + * Throttles the execution of promises to ensure that no more than the specified limit are running concurrently. + * + * @param promises - An array of promises to be executed. + * @param limit - The maximum number of promises to run concurrently. 
+ */ +export async function throttlePromises(promises: Promise[], limit: number) { + const executing: Promise[] = []; + for (const promise of promises) { + const p = promise.then(() => { + void executing.splice(executing.indexOf(p), 1); + }); + executing.push(p); + if (executing.length >= limit) { + await Promise.race(executing); + } + } + await Promise.all(executing); +} diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts new file mode 100644 index 0000000..2c76179 --- /dev/null +++ b/src/helpers/issue.ts @@ -0,0 +1,246 @@ +import { createKey } from "../handlers/comments"; +import { FetchedCodes, FetchParams, LinkedIssues } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; +import { Context } from "../types/context"; // Import Context type + +/** + * Removes duplicate streamlined comments based on their body content. + * + * @param streamlinedComments - The record of streamlined comments to deduplicate. + * @returns The deduplicated record of streamlined comments. + */ +export function dedupeStreamlinedComments(streamlinedComments: Record) { + for (const key of Object.keys(streamlinedComments)) { + streamlinedComments[key] = streamlinedComments[key].filter( + (comment: StreamlinedComment, index: number, self: StreamlinedComment[]) => index === self.findIndex((t: StreamlinedComment) => t.body === comment.body) + ); + } + return streamlinedComments; +} + +/** + * Merges new streamlined comments into existing streamlined comments. + * + * @param existingComments - The existing comments to merge into. + * @param newComments - The new comments to merge. + * @returns The merged comments. 
+ */ +export function mergeStreamlinedComments(existingComments: Record, newComments: Record) { + if (!existingComments) { + existingComments = {}; + } + for (const [key, value] of Object.entries(newComments)) { + if (!existingComments[key]) { + existingComments[key] = []; + } + const previous = existingComments[key] || []; + existingComments[key] = [...previous, ...value]; + } + return existingComments; +} + +/** + * Extracts the owner, repository, and issue number from a given key. + * + * @param key - The key string in the format "owner/repo/issueNumber". + * @returns A tuple containing the owner, repository, and issue number. + */ +export function splitKey(key: string): [string, string, string] { + const parts = key.split("/"); + return [parts[0], parts[1], parts[2]]; +} + +/** + * Identifies issues from a comment string. + * + * @param comment - The comment string that may contain issue references. + * @param params - Additional parameters that may include context information. + * @returns An array of linked issues or null if no issues are found. + */ +export function idIssueFromComment(comment?: string | null): LinkedIssues[] | null { + const urlMatch = comment?.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+)\/(pull|issues?)\/(\d+)/g); + const response: LinkedIssues[] = []; + + if (urlMatch) { + urlMatch.forEach((url) => { + response.push(createLinkedIssueOrPr(url)); + }); + } + + return response.length > 0 ? response : null; +} + +/** + * Creates a linked issue or pull request object from a given GitHub URL. + * + * @param url - The GitHub URL to create the linked issue or pull request from. + * @returns An object representing the linked issue or pull request. + */ +function createLinkedIssueOrPr(url: string): LinkedIssues { + const key = createKey(url); + const [owner, repo, issueNumber] = splitKey(key); + return { + owner, + repo, + issueNumber: parseInt(issueNumber), + url, + }; +} + +/** + * Fetches the code linked from a GitHub issue. 
+ * + * @param issue - The issue string containing GitHub URLs. + * @param context - The context object containing the octokit instance. + * @param url - The URL of the issue. + * @param extensions - The list of file extensions to filter the linked files. + * @returns A promise that resolves to an array of fetched codes. + */ +export async function fetchCodeLinkedFromIssue( + issue: string, + context: Context, + url: string, + extensions: string[] = [".ts", ".json", ".sol"] +): Promise<FetchedCodes[]> { + const { octokit } = context; + // Function to extract owner, repo, and path from a GitHub URL + function parseGitHubUrl(url: string): { owner: string; repo: string; path: string } | null { + const match = url.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+)\/blob\/[^/]+\/(.+)/); + return match ? { owner: match[1], repo: match[2], path: match[3] } : null; + } + // Function to check if a file has one of the specified extensions + function hasValidExtension(path: string) { + const cleanPath = path.split("#")[0]; // Remove any fragment identifiers like #L39-L49 + return extensions.some((ext) => cleanPath.toLowerCase().endsWith(ext.toLowerCase())); + } + //Function to remove Line numbers from the URL + function removeLineNumbers(url: string) { + const match = url.match(/(.*?)(#L\d+(-L\d+)?)/); + return match ? match[1] : url; + } + // Extract all GitHub URLs from the issue + const urls = issue.match(/https?:\/\/(www\.)?github\.com\/[^\s]+/g) || []; + // Process each URL + const results = await Promise.all( + urls.map(async (url) => { + let parsedUrl = parseGitHubUrl(url); + parsedUrl = parsedUrl ?
{ ...parsedUrl, path: removeLineNumbers(parsedUrl.path) } : null; + if (!parsedUrl || !hasValidExtension(parsedUrl.path)) return null; + try { + //Parse the commit sha from the URL + const commitSha = url.match(/https?:\/\/github\.com\/[^/]+\/[^/]+\/blob\/([^/]+)\/.+/); + let response; + if (commitSha) { + response = await octokit.repos.getContent({ + owner: parsedUrl.owner, + repo: parsedUrl.repo, + ref: commitSha ? commitSha[1] : "main", + path: parsedUrl.path, + }); + } else { + response = await octokit.repos.getContent({ + owner: parsedUrl.owner, + repo: parsedUrl.repo, + path: parsedUrl.path, + }); + } + + if ("content" in response.data) { + const content = Buffer.from(response.data.content, "base64").toString(); + return { body: content, id: parsedUrl.path }; + } + } catch (error) { + console.error(`Error fetching content from ${url}:`, error); + } + return null; + }) + ); + return results + .filter((result): result is { body: string; id: string } => result !== null) + .map((result) => ({ + ...result, + org: context.payload.repository.owner.login, + repo: context.payload.repository.name, + issueNumber: parseInt(issue.match(/\/issues\/(\d+)/)?.[1] || "0", 10), + issueUrl: url, + user: null, + })); +} + +/** + * Optimizes the context strings by removing duplicates and sorting by information density. + * Removes exact duplicates and sorts by information density and length. + * + * @param strings - The array of context strings to optimize. + * @returns The optimized array of context strings. 
+ */ +export function optimizeContext(strings: string[]): string[] { + // Helper function to clean strings while preserving links + function cleanString(inputString: string): string { + // Preserve links by temporarily replacing them + const links: string[] = []; + inputString = inputString.replace(/https?:\/\/\S+/g, (match) => { + links.push(match); + return `__LINK${links.length - 1}__`; + }); + // Clean the string + inputString = inputString + .replace(/[^\w\s-/]|_/g, "") // Remove punctuation except '-' and '/' + .replace(/\s+/g, " ") + .trim() + .toLowerCase(); + // Restore links + inputString = inputString.replace(/__LINK(\d+)__/g, (i) => links[parseInt(i)]); + + return inputString; + } + // Helper function to calculate information density + function informationDensity(s: string): number { + const words = s.split(/\s+/); + const uniqueWords = new Set(words); + return uniqueWords.size / words.length; + } + // Clean and remove empty strings + const cleanedStrings = strings.map(cleanString).filter((s) => s.length > 0); + // Remove exact duplicates + const uniqueStrings = Array.from(new Set(cleanedStrings)); + // Sort strings by information density and length + uniqueStrings.sort((a, b) => { + const densityDiff = informationDensity(b) - informationDensity(a); + return densityDiff !== 0 ? densityDiff : b.length - a.length; + }); + const result: string[] = []; + const wordSet = new Set(); + for (const str of uniqueStrings) { + const words = str.split(/\s+/); + const newWords = words.filter((word) => !wordSet.has(word) && !word.startsWith("http")); + if (newWords.length > 0 || str.includes("http")) { + result.push(str); + newWords.forEach((word) => wordSet.add(word)); + } + } + return result; +} + +/** + * Extracts and returns the README content from the repository associated with the given issue. + * + * @param params - The parameters required to fetch the README, including the context with octokit instance. + * @returns The content of the README file as a string. 
+ */ +export async function pullReadmeFromRepoForIssue(params: FetchParams): Promise<string | undefined> { + let readme; + try { + const response = await params.context.octokit.repos.getContent({ + owner: params.context.payload.repository.owner?.login || params.context.payload.organization?.login || "", + repo: params.context.payload.repository.name, + path: "README.md", + }); + if ("content" in response.data) { + readme = Buffer.from(response.data.content, "base64").toString(); + } + } catch (error) { + throw new Error(`Error fetching README from repository: ${error}`); + } + return readme; +} diff --git a/src/main.ts b/src/main.ts deleted file mode 100644 index b46765e..0000000 --- a/src/main.ts +++ /dev/null @@ -1,55 +0,0 @@ -import * as core from "@actions/core"; -import * as github from "@actions/github"; -import { Octokit } from "@octokit/rest"; -import { Value } from "@sinclair/typebox/value"; -import { envSchema, pluginSettingsSchema, PluginInputs, pluginSettingsValidator } from "./types"; -import { plugin } from "./plugin"; - -/** - * How a GitHub action executes the plugin.
- */ -export async function run() { - const payload = github.context.payload.inputs; - - const env = Value.Decode(envSchema, payload.env); - const settings = Value.Decode(pluginSettingsSchema, Value.Default(pluginSettingsSchema, JSON.parse(payload.settings))); - - if (!pluginSettingsValidator.test(settings)) { - throw new Error("Invalid settings provided"); - } - - const inputs: PluginInputs = { - stateId: payload.stateId, - eventName: payload.eventName, - eventPayload: JSON.parse(payload.eventPayload), - settings, - authToken: payload.authToken, - ref: payload.ref, - }; - - await plugin(inputs, env); - - return returnDataToKernel(inputs.authToken, inputs.stateId, {}); -} - -async function returnDataToKernel(repoToken: string, stateId: string, output: object) { - const octokit = new Octokit({ auth: repoToken }); - await octokit.repos.createDispatchEvent({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - event_type: "return_data_to_ubiquibot_kernel", - client_payload: { - state_id: stateId, - output: JSON.stringify(output), - }, - }); -} - -run() - .then((result) => { - core.setOutput("result", result); - }) - .catch((error) => { - console.error(error); - core.setFailed(error); - }); diff --git a/src/plugin.ts b/src/plugin.ts index c790042..8eab234 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,47 +1,83 @@ import { Octokit } from "@octokit/rest"; -import { createClient } from "@supabase/supabase-js"; -import { createAdapters } from "./adapters"; -import { Env, PluginInputs } from "./types"; +import { PluginInputs } from "./types"; import { Context } from "./types"; +import { askQuestion } from "./handlers/ask-llm"; +import { addCommentToIssue } from "./handlers/add-comment"; +import { LogLevel, LogReturn, Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Env } from "./types/env"; +import { createAdapters } from "./adapters"; +import { createClient } from "@supabase/supabase-js"; +import { VoyageAIClient } from "voyageai"; +import 
OpenAI from "openai"; -/** - * How a worker executes the plugin. - */ export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); const supabase = createClient(env.SUPABASE_URL, env.SUPABASE_KEY); - + const voyageClient = new VoyageAIClient({ + apiKey: env.VOYAGEAI_API_KEY, + }); + const openAiObject = { + apiKey: env.OPENAI_API_KEY, + ...(inputs.settings.openAiBaseUrl && { baseURL: inputs.settings.openAiBaseUrl }), + }; + const openaiClient = new OpenAI(openAiObject); const context: Context = { eventName: inputs.eventName, payload: inputs.eventPayload, config: inputs.settings, octokit, env, - logger: { - debug(message: unknown, ...optionalParams: unknown[]) { - console.debug(message, ...optionalParams); - }, - info(message: unknown, ...optionalParams: unknown[]) { - console.log(message, ...optionalParams); - }, - warn(message: unknown, ...optionalParams: unknown[]) { - console.warn(message, ...optionalParams); - }, - error(message: unknown, ...optionalParams: unknown[]) { - console.error(message, ...optionalParams); - }, - fatal(message: unknown, ...optionalParams: unknown[]) { - console.error(message, ...optionalParams); - }, - }, + logger: new Logs("info" as LogLevel), adapters: {} as ReturnType, }; + context.adapters = createAdapters(supabase, voyageClient, openaiClient, context); + return runPlugin(context); +} - context.adapters = createAdapters(supabase, context); - - if (context.eventName === "issue_comment.created") { - // do something - } else { - context.logger.error(`Unsupported event: ${context.eventName}`); +export async function runPlugin(context: Context) { + const { + logger, + env: { UBIQUITY_OS_APP_NAME }, + } = context; + const question = context.payload.comment.body; + const slugRegex = new RegExp(`@${UBIQUITY_OS_APP_NAME} `, "gi"); + if (!question.match(slugRegex)) { + logger.info("Comment does not mention the app. 
Skipping."); + return; + } + if (context.payload.comment.user?.type === "Bot") { + logger.info("Comment is from a bot. Skipping."); + return; + } + if (question.replace(slugRegex, "").trim().length === 0) { + logger.info("Comment is empty. Skipping."); + return; } + logger.info(`Asking question: ${question}`); + let commentToPost; + try { + const response = await askQuestion(context, question); + const { answer, tokenUsage } = response; + if (!answer) { + throw logger.error(`No answer from OpenAI`); + } + logger.info(`Answer: ${answer}`, { tokenUsage }); + const tokens = `\n\n`; + commentToPost = answer + tokens; + } catch (err) { + let errorMessage; + if (err instanceof LogReturn) { + errorMessage = err; + } else if (err instanceof Error) { + errorMessage = context.logger.error(err.message, { error: err, stack: err.stack }); + } else { + errorMessage = context.logger.error("An error occurred", { err }); + } + commentToPost = `${errorMessage?.logMessage.diff}\n`; + } + await addCommentToIssue(context, commentToPost); +} + +function sanitizeMetadata(obj: LogReturn["metadata"]): string { + return JSON.stringify(obj, null, 2).replace(//g, ">").replace(/--/g, "--"); } diff --git a/src/types/context.ts b/src/types/context.ts index 45a0266..73f74b7 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -1,10 +1,11 @@ import { Octokit } from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; -import { createAdapters } from "../adapters"; -import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; +import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Env } from "./env"; +import { createAdapters } from "../adapters"; -export type SupportedEventsU = "issue_comment.created"; // Add more events here +export type SupportedEventsU = "issue_comment.created"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; @@ -14,14 +15,8 @@ export interface Context; - adapters: ReturnType; config: PluginSettings; env: Env; - logger: { - fatal: (message: unknown, ...optionalParams: unknown[]) => void; - error: (message: unknown, ...optionalParams: unknown[]) => void; - warn: (message: unknown, ...optionalParams: unknown[]) => void; - info: (message: unknown, ...optionalParams: unknown[]) => void; - debug: (message: unknown, ...optionalParams: unknown[]) => void; - }; + logger: Logs; + adapters: ReturnType; } diff --git a/src/types/env.ts b/src/types/env.ts index 512e64e..d548e9d 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -1,9 +1,20 @@ import { Type as T } from "@sinclair/typebox"; import { StaticDecode } from "@sinclair/typebox"; -import "dotenv/config"; import { StandardValidator } from "typebox-validators"; +import dotenv from "dotenv"; +dotenv.config(); +/** + * Define sensitive environment variables here. + * + * These are fed into the worker/workflow as `env` and are + * taken from either `dev.vars` or repository secrets. + * They are used with `process.env` but are type-safe. 
+ */ export const envSchema = T.Object({ + OPENAI_API_KEY: T.String(), + UBIQUITY_OS_APP_NAME: T.String({ default: "UbiquityOS" }), + VOYAGEAI_API_KEY: T.String(), SUPABASE_URL: T.String(), SUPABASE_KEY: T.String(), }); diff --git a/src/types/github-types.d.ts b/src/types/github-types.d.ts new file mode 100644 index 0000000..55e3824 --- /dev/null +++ b/src/types/github-types.d.ts @@ -0,0 +1,46 @@ +import { RestEndpointMethodTypes } from "@octokit/rest"; +import { Context } from "./context"; + +export type Issue = RestEndpointMethodTypes["issues"]["get"]["response"]["data"]; +export type Comments = + | RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"] + | RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"]; +export type User = RestEndpointMethodTypes["users"]["getByUsername"]["response"]["data"]; + +//Modify the Issue add User Type +export type IssueWithUser = Issue & { user: User }; + +export type FetchParams = { + context: Context; + issueNum?: number; + owner?: string; + repo?: string; +}; + +export type LinkedIssues = { + issueNumber: number; + repo: string; + owner: string; + url: string; + comments?: SimplifiedComment[] | null | undefined; + body?: string; +}; + +export type SimplifiedComment = { + user: User | null; + body: string; + id: string; + org: string; + repo: string; + issueUrl: string; +}; + +export type FetchedCodes = { + body: string; + user: User | null; + issueUrl: string; + id: string; + org: string; + repo: string; + issueNumber: number; +}; diff --git a/src/types/index.ts b/src/types/index.ts index 6ca5c88..4bcbbe7 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -1,3 +1,2 @@ export * from "./context"; -export * from "./env"; export * from "./plugin-inputs"; diff --git a/src/types/llm.d.ts b/src/types/llm.d.ts new file mode 100644 index 0000000..5bfaa19 --- /dev/null +++ b/src/types/llm.d.ts @@ -0,0 +1,19 @@ +export type StreamlinedComment = { + id: number; + user?: string; + body?: 
string; + org: string; + repo: string; + issueUrl: string; + specOrBody?: { + html: string; + text: string; + }; +}; + +export type StreamlinedComments = { + issueNumber: number; + repo: string; + org: string; + comments: StreamlinedComment[]; +}; diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 00d0a52..a98f0be 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -18,7 +18,13 @@ export interface PluginInputs; diff --git a/src/worker.ts b/src/worker.ts index 3048b5d..b713c77 100644 --- a/src/worker.ts +++ b/src/worker.ts @@ -1,10 +1,20 @@ import { Value } from "@sinclair/typebox/value"; +import { pluginSettingsSchema, pluginSettingsValidator } from "./types"; +import { Env, envValidator } from "./types/env"; +import manifest from "../manifest.json"; import { plugin } from "./plugin"; -import { Env, envValidator, pluginSettingsSchema, pluginSettingsValidator } from "./types"; export default { async fetch(request: Request, env: Env): Promise { try { + if (request.method === "GET") { + const url = new URL(request.url); + if (url.pathname === "/manifest.json") { + return new Response(JSON.stringify(manifest), { + headers: { "content-type": "application/json" }, + }); + } + } if (request.method !== "POST") { return new Response(JSON.stringify({ error: `Only POST requests are supported.` }), { status: 405, @@ -18,10 +28,9 @@ export default { headers: { "content-type": "application/json" }, }); } - const webhookPayload = await request.json(); const settings = Value.Decode(pluginSettingsSchema, Value.Default(pluginSettingsSchema, webhookPayload.settings)); - + const decodedEnv = Value.Decode(envValidator.schema, Value.Default(envValidator.schema, env)); if (!pluginSettingsValidator.test(settings)) { const errors: string[] = []; for (const error of pluginSettingsValidator.errors(settings)) { @@ -33,9 +42,9 @@ export default { headers: { "content-type": "application/json" }, }); } - if (!envValidator.test(env)) { + if 
(!envValidator.test(decodedEnv)) { const errors: string[] = []; - for (const error of envValidator.errors(env)) { + for (const error of envValidator.errors(decodedEnv)) { console.error(error); errors.push(`${error.path}: ${error.message}`); } @@ -46,7 +55,7 @@ export default { } webhookPayload.settings = settings; - await plugin(webhookPayload, env); + await plugin(webhookPayload, decodedEnv); return new Response(JSON.stringify("OK"), { status: 200, headers: { "content-type": "application/json" } }); } catch (error) { return handleUncaughtError(error); diff --git a/supabase/.gitignore b/supabase/.gitignore new file mode 100644 index 0000000..a3ad880 --- /dev/null +++ b/supabase/.gitignore @@ -0,0 +1,4 @@ +# Supabase +.branches +.temp +.env diff --git a/supabase/config.toml b/supabase/config.toml new file mode 100644 index 0000000..301507c --- /dev/null +++ b/supabase/config.toml @@ -0,0 +1,161 @@ +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "command-ask" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. public and storage are always included. +schemas = ["public", "storage", "graphql_public"] +# Extra schemas to add to the search_path of every request. public is always included. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. +max_rows = 1000 + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. 
+major_version = 15 + +[db.pooler] +enabled = false +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. +# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed. +max_client_conn = 100 + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv6) +# ip_version = "IPv6" +# The maximum length in bytes of HTTP request headers. (default: 4096) +# max_header_length = 4096 + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://127.0.0.1" +# OpenAI API Key to use for Supabase AI in the Supabase Studio. +openai_api_key = "env(OPENAI_API_KEY)" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). +file_size_limit = "50MiB" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. +site_url = "http://127.0.0.1:3000" +# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. +additional_redirect_urls = ["https://127.0.0.1:3000"] +# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). +jwt_expiry = 3600 +# If disabled, the refresh token will never expire. 
+enable_refresh_token_rotation = true +# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. +# Requires enable_refresh_token_rotation = true. +refresh_token_reuse_interval = 10 +# Allow/disallow new user signups to your project. +enable_signup = true +# Allow/disallow testing manual linking of accounts +enable_manual_linking = false + +[auth.email] +# Allow/disallow new user signups via email to your project. +enable_signup = true +# If enabled, a user will be required to confirm any email change on both the old, and new email +# addresses. If disabled, only the new email is required to confirm. +double_confirm_changes = true +# If enabled, users need to confirm their email address before signing in. +enable_confirmations = false + +# Uncomment to customize email template +# [auth.email.template.invite] +# subject = "You have been invited" +# content_path = "./supabase/templates/invite.html" + +[auth.sms] +# Allow/disallow new user signups via SMS to your project. +enable_signup = true +# If enabled, users need to confirm their phone number before signing in. +enable_confirmations = false +# Template for sending OTP to users +template = "Your code is {{ .Code }} ." + +# Use pre-defined map of phone number to OTP for testing. +[auth.sms.test_otp] +# 4152127777 = "123456" + +# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used. +[auth.hook.custom_access_token] +# enabled = true +# uri = "pg-functions:////" + + +# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. +[auth.sms.twilio] +enabled = false +account_sid = "" +message_service_sid = "" +# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: +auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" + +# Use an external OAuth provider. 
The full list of providers are: `apple`, `azure`, `bitbucket`, +# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, +# `twitter`, `slack`, `spotify`, `workos`, `zoom`. +[auth.external.apple] +enabled = false +client_id = "" +# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead: +secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" +# Overrides the default auth redirectUrl. +redirect_uri = "" +# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, +# or any other third-party OIDC providers. +url = "" + +[analytics] +enabled = false +port = 54327 +vector_port = 54328 +# Configure one of the supported backends: `postgres`, `bigquery`. +backend = "postgres" + +# Experimental features may be deprecated any time +[experimental] +# Configures Postgres storage engine to use OrioleDB (S3) +orioledb_version = "" +# Configures S3 bucket URL, eg. .s3-.amazonaws.com +s3_host = "env(S3_HOST)" +# Configures S3 bucket region, eg. 
us-east-1 +s3_region = "env(S3_REGION)" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "env(S3_ACCESS_KEY)" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/supabase/migrations/20241005200943_comments_function.sql b/supabase/migrations/20241005200943_comments_function.sql new file mode 100644 index 0000000..dac641c --- /dev/null +++ b/supabase/migrations/20241005200943_comments_function.sql @@ -0,0 +1,119 @@ +CREATE OR REPLACE FUNCTION find_similar_issue_ftse( + current_id VARCHAR, + query_text TEXT, + query_embedding VECTOR(1024), + threshold DOUBLE PRECISION, + max_results INTEGER DEFAULT 10 +) +RETURNS TABLE( + issue_id VARCHAR, + issue_plaintext TEXT, + similarity DOUBLE PRECISION, + text_similarity DOUBLE PRECISION +) AS $$ +DECLARE + query_tokens TEXT[]; + query_tsquery TSQUERY; +BEGIN + -- Generate query tokens + SELECT array_agg(DISTINCT lower(word)) + INTO query_tokens + FROM unnest(regexp_split_to_array(query_text, '\s+')) AS word + WHERE length(word) > 2; + + -- Create tsquery from tokens + SELECT to_tsquery(string_agg(lexeme || ':*', ' | ')) + INTO query_tsquery + FROM unnest(query_tokens) lexeme; + + RETURN QUERY + WITH vector_similarity AS ( + SELECT + id, + plaintext, + (1 - (embedding <-> query_embedding))::DOUBLE PRECISION AS vec_similarity + FROM issues + WHERE id <> current_id + AND (1 - (embedding <-> query_embedding))::DOUBLE PRECISION > threshold + ), + text_similarity AS ( + SELECT + id, + plaintext, + ts_rank(to_tsvector('english', plaintext), query_tsquery)::DOUBLE PRECISION AS text_sim + FROM issues + WHERE to_tsvector('english', plaintext) @@ query_tsquery + ) + SELECT + vs.id AS issue_id, + vs.plaintext AS issue_plaintext, + vs.vec_similarity AS similarity, + COALESCE(ts.text_sim, 0::DOUBLE PRECISION) AS text_similarity + FROM vector_similarity vs + LEFT JOIN text_similarity ts ON vs.id = ts.id + ORDER BY (vs.vec_similarity + COALESCE(ts.text_sim, 0::DOUBLE PRECISION)) 
DESC + LIMIT max_results; +END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION find_similar_comments( + current_id VARCHAR, + query_text TEXT, + query_embedding VECTOR(1024), + threshold DOUBLE PRECISION, + max_results INTEGER DEFAULT 10 +) +RETURNS TABLE( + comment_id VARCHAR, + comment_plaintext TEXT, + comment_issue_id VARCHAR, + similarity DOUBLE PRECISION, + text_similarity DOUBLE PRECISION +) AS $$ +DECLARE + query_tokens TEXT[]; + query_tsquery TSQUERY; +BEGIN + -- Generate query tokens + SELECT array_agg(DISTINCT lower(word)) + INTO query_tokens + FROM unnest(regexp_split_to_array(query_text, '\s+')) AS word + WHERE length(word) > 2; + + -- Create tsquery from tokens + SELECT to_tsquery(string_agg(lexeme || ':*', ' | ')) + INTO query_tsquery + FROM unnest(query_tokens) lexeme; + + RETURN QUERY + WITH vector_similarity AS ( + SELECT + id, + plaintext, + issue_id, + 1 - (l2_distance(query_embedding, embedding))::DOUBLE PRECISION AS vec_similarity + FROM issue_comments + WHERE id <> current_id + AND 1 - (l2_distance(query_embedding, embedding))::DOUBLE PRECISION > threshold + ), + text_similarity AS ( + SELECT + id, + plaintext, + issue_id, + ts_rank(to_tsvector('english', plaintext), query_tsquery)::DOUBLE PRECISION AS text_sim + FROM issue_comments + WHERE to_tsvector('english', plaintext) @@ query_tsquery + ) + SELECT + vs.id AS comment_id, + vs.plaintext AS comment_plaintext, + vs.issue_id AS comment_issue_id, + vs.vec_similarity AS similarity, + COALESCE(ts.text_sim, 0::DOUBLE PRECISION) AS text_similarity + FROM vector_similarity vs + LEFT JOIN text_similarity ts ON vs.id = ts.id + ORDER BY (vs.vec_similarity + COALESCE(ts.text_sim, 0::DOUBLE PRECISION)) DESC + LIMIT max_results; +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/supabase/seed.sql b/supabase/seed.sql new file mode 100644 index 0000000..e69de29 diff --git a/tests/__mocks__/db.ts b/tests/__mocks__/db.ts index 7df690c..9f25606 100644 --- a/tests/__mocks__/db.ts +++ 
b/tests/__mocks__/db.ts @@ -1,5 +1,5 @@ // cSpell:disable -import { factory, primaryKey } from "@mswjs/data"; +import { factory, nullable, primaryKey } from "@mswjs/data"; /** * Creates an object that can be used as a db to persist data within tests @@ -7,6 +7,110 @@ import { factory, primaryKey } from "@mswjs/data"; export const db = factory({ users: { id: primaryKey(Number), + login: String, + }, + issue: { + id: primaryKey(Number), + assignees: Array, + html_url: String, + repository_url: String, + state: String, + owner: String, + repo: String, + labels: Array, + author_association: String, + body: nullable(String), + closed_at: nullable(Date), + created_at: nullable(Date), + comments: Number, + comments_url: String, + events_url: String, + labels_url: String, + locked: Boolean, + node_id: String, + title: String, + number: Number, + updated_at: Date, + url: String, + user: nullable(Object), + milestone: nullable(Object), + assignee: nullable({ + avatar_url: String, + email: nullable(String), + events_url: String, + followers_url: String, + following_url: String, + gists_url: String, + gravatar_id: nullable(String), + html_url: String, + id: Number, + login: String, + name: nullable(String), + node_id: String, + organizations_url: String, + received_events_url: String, + repos_url: String, + site_admin: Boolean, + starred_at: String, + starred_url: String, + subscriptions_url: String, + type: String, + url: String, + }), + }, + repo: { + id: primaryKey(Number), + html_url: String, name: String, + url: String, + owner: { + login: String, + id: Number, + }, + issues: Array, + }, + pull: { + id: primaryKey(Number), + html_url: String, + number: Number, + state: String, + title: String, + user: Object, + body: nullable(String), + repo: String, + owner: String, + author: Object, + assignees: Array, + requested_reviewers: Array, + requested_teams: Array, + labels: Array, + draft: Boolean, + created_at: Date, + updated_at: Date, + closed_at: nullable(Date), + 
merged_at: nullable(Date), + merge_commit_sha: nullable(String), + assignee: nullable(Object), + milestone: nullable(Object), + head: Object, + base: Object, + _links: Object, + author_association: String, + }, + comments: { + id: primaryKey(Number), + node_id: String, + url: String, + issue_url: nullable(String), + pull_request_url: nullable(String), + body: nullable(String), + html_url: String, + user: { + login: String, + type: String, + }, + issue_number: Number, + owner: String, + repo: String, }, }); diff --git a/tests/__mocks__/handlers.ts b/tests/__mocks__/handlers.ts index 0d31c3c..20503d9 100644 --- a/tests/__mocks__/handlers.ts +++ b/tests/__mocks__/handlers.ts @@ -1,11 +1,88 @@ import { http, HttpResponse } from "msw"; import { db } from "./db"; +import issueTemplate from "./issue-template"; /** * Intercepts the routes and returns a custom payload */ export const handlers = [ - http.get("https://api.ubiquity.com/users", () => { - return HttpResponse.json(db.users.getAll()); + http.post("https://api.openai.com/v1/chat/completions", () => { + const answer = `This is a mock answer for the chat`; + + return HttpResponse.json({ + usage: { + completion_tokens: 150, + prompt_tokens: 1000, + total_tokens: 1150, + }, + choices: [ + { + message: { + content: answer, + }, + }, + ], + }); + }), + // GET https://api.github.com/repos/ubiquity/test-repo/issues/1 + http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number", ({ params: { owner, repo, issue_number: issueNumber } }) => + HttpResponse.json( + db.issue.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(issueNumber) } } }) + ) + ), + + // get repo + http.get("https://api.github.com/repos/:owner/:repo", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { + const item = db.repo.findFirst({ where: { name: { equals: repo }, owner: { login: { equals: owner } } } }); + if (!item) { + return new 
HttpResponse(null, { status: 404 }); + } + return HttpResponse.json(item); }), + // get issue + http.get("https://api.github.com/repos/:owner/:repo/issues", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => + HttpResponse.json(db.issue.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })) + ), + // create issue + http.post("https://api.github.com/repos/:owner/:repo/issues", () => { + const id = db.issue.count() + 1; + const newItem = { ...issueTemplate, id }; + db.issue.create(newItem); + return HttpResponse.json(newItem); + }), + // get repo issues + http.get("https://api.github.com/orgs/:org/repos", ({ params: { org } }: { params: { org: string } }) => + HttpResponse.json(db.repo.findMany({ where: { owner: { login: { equals: org } } } })) + ), + // add comment to issue + http.post("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number: issueNumber } }) => + HttpResponse.json({ owner, repo, issueNumber }) + ), + // list pull requests + http.get("https://api.github.com/repos/:owner/:repo/pulls", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => + HttpResponse.json(db.pull.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })) + ), + // update a pull request + http.patch("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number: pullNumber } }) => + HttpResponse.json({ owner, repo, pull_number: pullNumber }) + ), + + // list issue comments + http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number: issueNumber } }) => + HttpResponse.json( + db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(issueNumber) } } }) + ) + ), + //list review comments + 
http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/comments", ({ params: { owner, repo, pull_number: pullNumber } }) => + HttpResponse.json( + db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(pullNumber) } } }) + ) + ), + // octokit.pulls.get + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number: pullNumber } }) => + HttpResponse.json( + db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pullNumber) } } }) + ) + ), ]; diff --git a/tests/__mocks__/issue-template.ts b/tests/__mocks__/issue-template.ts new file mode 100644 index 0000000..d8f682c --- /dev/null +++ b/tests/__mocks__/issue-template.ts @@ -0,0 +1,55 @@ +export default { + assignee: { + login: "", + avatar_url: "", + email: "undefined", + events_url: "", + followers_url: "", + following_url: "", + gists_url: "", + gravatar_id: null, + html_url: "", + id: 1, + name: "undefined", + node_id: "", + organizations_url: "", + received_events_url: "", + repos_url: "", + site_admin: false, + starred_at: "", + starred_url: "", + subscriptions_url: "", + type: "", + url: "", + }, + author_association: "NONE", + closed_at: null, + comments: 0, + comments_url: "", + created_at: new Date().toISOString(), + events_url: "", + html_url: "https://github.com/ubiquity/test-repo/issues/1", + id: 1, + labels_url: "", + locked: false, + milestone: null, + node_id: "1", + owner: "ubiquity", + number: 1, + repository_url: "https://github.com/ubiquity/test-repo", + state: "open", + title: "issue", + updated_at: "", + url: "https://api.github.com/repos/ubiquity/test-repo/issues/1", + user: null, + repo: "test-repo", + labels: [ + { + name: "Price: 200 USD", + }, + { + name: "Time: 1h", + }, + ], + body: "This is a demo spec for a demo task just perfect for testing.", +}; diff --git 
a/tests/__mocks__/repo-template.ts b/tests/__mocks__/repo-template.ts new file mode 100644 index 0000000..7bf7be7 --- /dev/null +++ b/tests/__mocks__/repo-template.ts @@ -0,0 +1,11 @@ +export default { + id: 1, + html_url: "", + url: "https://api.github.com/repos/ubiquity/test-repo", + name: "test-repo", + owner: { + login: "ubiquity", + id: 1, + }, + issues: [], +}; diff --git a/tests/__mocks__/users-get.json b/tests/__mocks__/users-get.json index 59f0200..8681c7b 100644 --- a/tests/__mocks__/users-get.json +++ b/tests/__mocks__/users-get.json @@ -1,10 +1,10 @@ [ { "id": 1, - "name": "user1" + "login": "ubiquity" }, { "id": 2, - "name": "user2" + "login": "user2" } ] diff --git a/tests/main.test.ts b/tests/main.test.ts index 7967004..9875f81 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -2,21 +2,410 @@ import { db } from "./__mocks__/db"; import { server } from "./__mocks__/node"; import usersGet from "./__mocks__/users-get.json"; import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it } from "@jest/globals"; +import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Context, SupportedEventsU } from "../src/types"; +import { drop } from "@mswjs/data"; +import issueTemplate from "./__mocks__/issue-template"; +import repoTemplate from "./__mocks__/repo-template"; +import { askQuestion } from "../src/handlers/ask-llm"; +import { runPlugin } from "../src/plugin"; +import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; +import { envSchema } from "../src/types/env"; +import { CompletionsType } from "../src/adapters/openai/helpers/completions"; -beforeAll(() => server.listen()); -afterEach(() => server.resetHandlers()); +const TEST_QUESTION = "what is pi?"; +const TEST_SLASH_COMMAND = "@UbiquityOS what is pi?"; +const LOG_CALLER = "_Logs."; +const ISSUE_ID_2_CONTENT = "More context here #2"; +const ISSUE_ID_3_CONTENT = "More context here #3"; + +type Comment = { + id: number; + user: { + login: string; + type: 
string; + }; + body: string; + url: string; + html_url: string; + owner: string; + repo: string; + issue_number: number; + issue_url?: string; + pull_request_url?: string; +}; + +const octokit = jest.requireActual("@octokit/rest"); +jest.requireActual("openai"); + +beforeAll(() => { + server.listen(); +}); +afterEach(() => { + drop(db); + server.resetHandlers(); +}); afterAll(() => server.close()); -describe("User tests", () => { - beforeEach(() => { - for (const item of usersGet) { - db.users.create(item); - } +// TESTS + +describe("Ask plugin tests", () => { + beforeEach(async () => { + await setupTests(); + }); + + it("should ask GPT a question", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + const res = await askQuestion(ctx, TEST_QUESTION); + + expect(res).toBeDefined(); + + expect(res?.answer).toBe("This is a mock answer for the chat"); + }); + + it("should not ask GPT a question if comment is from a bot", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const infoSpy = jest.spyOn(ctx.logger, "info"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + if (!ctx.payload.comment.user) return; + ctx.payload.comment.user.type = "Bot"; + await runPlugin(ctx); + + expect(infoSpy).toHaveBeenCalledWith("Comment is from a bot. Skipping."); + }); + + it("should not ask GPT a question if comment does not start with bot name", async () => { + const ctx = createContext(TEST_QUESTION); + const infoSpy = jest.spyOn(ctx.logger, "info"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + await runPlugin(ctx); + + expect(infoSpy).toHaveBeenCalledWith("Comment does not mention the app. 
Skipping."); + }); + + it("should not ask GPT a question if no question is provided", async () => { + const ctx = createContext(`@UbiquityOS `); + const infoSpy = jest.spyOn(ctx.logger, "info"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + await runPlugin(ctx); + + expect(infoSpy).toHaveBeenCalledWith("Comment is empty. Skipping."); + }); + it("Should throw if OPENAI_API_KEY is not defined", () => { + const settings = {}; + expect(() => Value.Decode(envSchema, settings)).toThrow(TransformDecodeCheckError); }); - it("Should fetch all the users", async () => { - const res = await fetch("https://api.ubiquity.com/users"); - const data = await res.json(); - expect(data).toMatchObject(usersGet); + it("should construct the chat history correctly", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const infoSpy = jest.spyOn(ctx.logger, "info"); + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + await runPlugin(ctx); + + expect(infoSpy).toHaveBeenCalledTimes(3); + expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); + expect(infoSpy).toHaveBeenNthCalledWith(3, "Answer: This is a mock answer for the chat", { + caller: LOG_CALLER, + tokenUsage: { + input: 1000, + output: 150, + total: 1150, + }, + }); + }); + + it("should collect the linked issues correctly", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const infoSpy = jest.spyOn(ctx.logger, "info"); + createComments([ + transformCommentTemplate(1, 1, ISSUE_ID_2_CONTENT, "ubiquity", "test-repo", true, "2"), + transformCommentTemplate(2, 1, TEST_QUESTION, "ubiquity", "test-repo", true, "1"), + transformCommentTemplate(3, 2, ISSUE_ID_3_CONTENT, "ubiquity", "test-repo", true, "3"), + transformCommentTemplate(4, 3, "Just a comment", "ubiquity", "test-repo", true, "1"), + ]); + + await runPlugin(ctx); + + expect(infoSpy).toHaveBeenCalledTimes(3); + + 
expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); + + const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === + + This is a demo spec for a demo task just perfect for testing. + === End Current Issue #1 Specification === + + === Current Issue #1 Conversation === ubiquity/test-repo #1 === + + 1 ubiquity: ${ISSUE_ID_2_CONTENT} [#2](https://www.github.com/ubiquity/test-repo/issues/2) + 2 ubiquity: ${TEST_QUESTION} [#1](https://www.github.com/ubiquity/test-repo/issues/1) + === End Current Issue #1 Conversation === + + === Linked Issue #2 Specification === ubiquity/test-repo/2 === + + Related to issue #3 + === End Linked Issue #2 Specification === + + === Linked Issue #2 Conversation === ubiquity/test-repo #2 === + + 3 ubiquity: ${ISSUE_ID_3_CONTENT} [#3](https://www.github.com/ubiquity/test-repo/issues/3) + === End Linked Issue #2 Conversation === + + === Linked Issue #3 Specification === ubiquity/test-repo/3 === + + Just another issue + === End Linked Issue #3 Specification === + + === Linked Issue #3 Conversation === ubiquity/test-repo #3 === + + 4 ubiquity: Just a comment [#1](https://www.github.com/ubiquity/test-repo/issues/1) + === End Linked Issue #3 Conversation ===\n + `; + + const normalizedExpected = normalizeString(prompt); + const normalizedReceived = normalizeString(infoSpy.mock.calls[1][0]); + + expect(normalizedReceived).toEqual(normalizedExpected); }); }); + +// HELPERS + +function normalizeString(str: string) { + return str.replace(/\s+/g, " ").trim(); +} + +function transformCommentTemplate(commentId: number, issueNumber: number, body: string, owner: string, repo: string, isIssue = true, linkTo: string = "1") { + const COMMENT_TEMPLATE = { + id: 1, + user: { + login: "ubiquity", + type: "User", + }, + body: TEST_QUESTION, + url: "https://api.github.com/repos/ubiquity/test-repo/issues/comments/1", + html_url: "https://www.github.com/ubiquity/test-repo/issues/1", + owner: "ubiquity", + 
repo: "test-repo", + issue_number: 1, + }; + + const comment: Comment = { + id: commentId, + user: { + login: COMMENT_TEMPLATE.user.login, + type: "User", + }, + body: body + ` [#${linkTo}](${COMMENT_TEMPLATE.html_url.replace("1", linkTo.toString())})`, + url: COMMENT_TEMPLATE.url.replace("1", issueNumber.toString()), + html_url: COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()), + owner: owner, + repo: repo, + issue_number: issueNumber, + }; + + if (isIssue) { + comment.issue_url = COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()); + } else { + comment.pull_request_url = COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()); + } + + return comment; +} + +async function setupTests() { + for (const item of usersGet) { + db.users.create(item); + } + + db.repo.create({ + ...repoTemplate, + }); + + db.issue.create({ + ...issueTemplate, + }); + + db.issue.create({ + ...issueTemplate, + id: 2, + number: 2, + body: "Related to issue #3", + }); + + db.issue.create({ + ...issueTemplate, + id: 3, + number: 3, + body: "Just another issue", + }); +} + +function createComments(comments: Comment[]) { + for (const comment of comments) { + db.comments.create({ + ...comment, + }); + } +} + +function createContext(body = TEST_SLASH_COMMAND) { + const user = db.users.findFirst({ where: { id: { equals: 1 } } }); + return { + payload: { + issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["issue"], + sender: user, + repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["repository"], + comment: { body, user: user } as unknown as Context["payload"]["comment"], + action: "created" as string, + installation: { id: 1 } as unknown as Context["payload"]["installation"], + organization: { login: "ubiquity" } as unknown as Context["payload"]["organization"], + }, + owner: "ubiquity", + repo: "test-repo", + logger: new Logs("debug"), + config: {}, + env: { + 
UBIQUITY_OS_APP_NAME: "UbiquityOS", + OPENAI_API_KEY: "test", + }, + adapters: { + supabase: { + issue: { + getIssue: async () => { + return [ + { + id: "1", + markdown: "This is a demo spec for a demo task just perfect for testing.", + plaintext: "This is a demo spec for a demo task just perfect for testing.", + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + ]; + }, + findSimilarIssues: async () => { + return [ + { + issue_id: "2", + issue_plaintext: "Related to issue #3", + similarity: 0.5, + }, + { + issue_id: "3", + issue_plaintext: "Some other issue", + similarity: 0.3, + }, + ]; + }, + }, + comment: { + getComments: async () => { + return [ + { + id: "1", + plaintext: TEST_QUESTION, + markdown: TEST_QUESTION, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "2", + plaintext: ISSUE_ID_2_CONTENT, + markdown: ISSUE_ID_2_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "3", + plaintext: ISSUE_ID_3_CONTENT, + markdown: ISSUE_ID_3_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "4", + plaintext: "Something new", + markdown: "Something new", + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + ]; + }, + findSimilarComments: async () => { + return [ + { + id: "2", + plaintext: ISSUE_ID_2_CONTENT, + markdown: ISSUE_ID_2_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "3", + plaintext: ISSUE_ID_3_CONTENT, + markdown: ISSUE_ID_3_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 
2, 3], + }, + { + id: "4", + plaintext: "New Comment", + markdown: "New Comment", + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + ]; + }, + }, + }, + voyage: { + embedding: { + createEmbedding: async () => { + return new Array(1024).fill(0); + }, + }, + reranker: { + reRankResults: async (similarText: string[]) => { + return similarText; + }, + }, + }, + openai: { + completions: { + createCompletion: async (): Promise => { + return { + answer: "This is a mock answer for the chat", + tokenUsage: { + input: 1000, + output: 150, + total: 1150, + }, + }; + }, + }, + }, + }, + octokit: new octokit.Octokit(), + eventName: "issue_comment.created" as SupportedEventsU, + } as unknown as Context; +} diff --git a/wrangler.toml b/wrangler.toml index 5a0953a..f780a61 100644 --- a/wrangler.toml +++ b/wrangler.toml @@ -1,4 +1,4 @@ -name = "your-plugin-name" +name = "command-ask" main = "src/worker.ts" compatibility_date = "2024-05-23" node_compat = true diff --git a/yarn.lock b/yarn.lock index d4049db..a5fea8f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,32 +2,6 @@ # yarn lockfile v1 -"@actions/core@1.10.1": - version "1.10.1" - resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.1.tgz#61108e7ac40acae95ee36da074fa5850ca4ced8a" - integrity sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g== - dependencies: - "@actions/http-client" "^2.0.1" - uuid "^8.3.2" - -"@actions/github@6.0.0": - version "6.0.0" - resolved "https://registry.yarnpkg.com/@actions/github/-/github-6.0.0.tgz#65883433f9d81521b782a64cc1fd45eef2191ea7" - integrity sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g== - dependencies: - "@actions/http-client" "^2.2.0" - "@octokit/core" "^5.0.1" - "@octokit/plugin-paginate-rest" "^9.0.0" - "@octokit/plugin-rest-endpoint-methods" "^10.0.0" - -"@actions/http-client@^2.0.1", 
"@actions/http-client@^2.2.0": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.2.1.tgz#ed3fe7a5a6d317ac1d39886b0bb999ded229bb38" - integrity sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw== - dependencies: - tunnel "^0.0.6" - undici "^5.25.4" - "@ampproject/remapping@^2.2.0": version "2.3.0" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" @@ -285,9 +259,9 @@ "@babel/helper-plugin-utils" "^7.24.6" "@babel/runtime@^7.21.0": - version "7.24.6" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.6.tgz#5b76eb89ad45e2e4a0a8db54c456251469a3358e" - integrity sha512-Ja18XcETdEl5mzzACGd+DKgaGJzPTCow7EglgwTmHdwokzDFYh/MHua6lU6DV/hjF2IaOJ4oX2nqnjG7RElKOw== + version "7.25.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.25.7.tgz#7ffb53c37a8f247c8c4d335e89cdf16a2e0d0fb6" + integrity sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w== dependencies: regenerator-runtime "^0.14.0" @@ -344,6 +318,14 @@ dependencies: statuses "^2.0.1" +"@bundled-es-modules/tough-cookie@^0.1.6": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@bundled-es-modules/tough-cookie/-/tough-cookie-0.1.6.tgz#fa9cd3cedfeecd6783e8b0d378b4a99e52bde5d3" + integrity sha512-dvMHbL464C0zI+Yqxbz6kZ5TOEp7GLW+pry/RWndAR8MJQAXZ2rPmIs8tziTZjeIyhSNZgZbCePtfSbdWqStJw== + dependencies: + "@types/tough-cookie" "^4.0.5" + tough-cookie "^4.1.4" + "@cloudflare/kv-asset-handler@0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.3.2.tgz#06437b75664729823ac9033b89f06a3b078e4f55" @@ -1233,41 +1215,49 @@ integrity sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew== "@inquirer/confirm@^3.0.0": - version "3.1.9" - resolved 
"https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-3.1.9.tgz#1bc384bc8267827ec75d0684e189692bb4dda38b" - integrity sha512-UF09aejxCi4Xqm6N/jJAiFXArXfi9al52AFaSD+2uIHnhZGtd1d6lIGTRMPouVSJxbGEi+HkOWSYaiEY/+szUw== + version "3.2.0" + resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-3.2.0.tgz#6af1284670ea7c7d95e3f1253684cfbd7228ad6a" + integrity sha512-oOIwPs0Dvq5220Z8lGL/6LHRTEr9TgLHmiI99Rj1PJ1p1czTys+olrgBqZk4E2qC0YTzeHprxSQmoHioVdJ7Lw== dependencies: - "@inquirer/core" "^8.2.2" - "@inquirer/type" "^1.3.3" + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" -"@inquirer/core@^8.2.2": - version "8.2.2" - resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-8.2.2.tgz#797b1e71b920c9788b9d26d89c8b334149852d52" - integrity sha512-K8SuNX45jEFlX3EBJpu9B+S2TISzMPGXZIuJ9ME924SqbdW6Pt6fIkKvXg7mOEOKJ4WxpQsxj0UTfcL/A434Ww== +"@inquirer/core@^9.1.0": + version "9.2.1" + resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-9.2.1.tgz#677c49dee399c9063f31e0c93f0f37bddc67add1" + integrity sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg== dependencies: - "@inquirer/figures" "^1.0.3" - "@inquirer/type" "^1.3.3" + "@inquirer/figures" "^1.0.6" + "@inquirer/type" "^2.0.0" "@types/mute-stream" "^0.0.4" - "@types/node" "^20.12.13" + "@types/node" "^22.5.5" "@types/wrap-ansi" "^3.0.0" ansi-escapes "^4.3.2" - chalk "^4.1.2" - cli-spinners "^2.9.2" cli-width "^4.1.0" mute-stream "^1.0.0" signal-exit "^4.1.0" strip-ansi "^6.0.1" wrap-ansi "^6.2.0" + yoctocolors-cjs "^2.1.2" -"@inquirer/figures@^1.0.3": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.3.tgz#1227cc980f88e6d6ab85abadbf164f5038041edd" - integrity sha512-ErXXzENMH5pJt5/ssXV0DfWUZqly8nGzf0UcBV9xTnP+KyffE2mqyxIMBrZ8ijQck2nU0TQm40EQB53YreyWHw== +"@inquirer/figures@^1.0.6": + version "1.0.7" + resolved 
"https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.7.tgz#d050ccc0eabfacc0248c4ff647a9dfba1b01594b" + integrity sha512-m+Trk77mp54Zma6xLkLuY+mvanPxlE4A7yNKs2HBiyZ4UkVs28Mv5c/pgWrHeInx+USHeX/WEPzjrWrcJiQgjw== + +"@inquirer/type@^1.5.3": + version "1.5.5" + resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-1.5.5.tgz#303ea04ce7ad2e585b921b662b3be36ef7b4f09b" + integrity sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA== + dependencies: + mute-stream "^1.0.0" -"@inquirer/type@^1.3.3": - version "1.3.3" - resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-1.3.3.tgz#26b2628630fd2381c7fa1e3ab396feb9bbc575da" - integrity sha512-xTUt0NulylX27/zMx04ZYar/kr1raaiFTVvQ5feljQsiAgdm0WPj4S73/ye0fbslh+15QrIuDvfCXTek7pMY5A== +"@inquirer/type@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-2.0.0.tgz#08fa513dca2cb6264fe1b0a2fabade051444e3f6" + integrity sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag== + dependencies: + mute-stream "^1.0.0" "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" @@ -1517,15 +1507,10 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" -"@mswjs/cookies@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@mswjs/cookies/-/cookies-1.1.0.tgz#1528eb43630caf83a1d75d5332b30e75e9bb1b5b" - integrity sha512-0ZcCVQxifZmhwNBoQIrystCb+2sWBY2Zw8lpfJBPCHGCA/HWqehITeCRVIv4VMy8MPlaHo2w2pTHFV2pFfqKPw== - -"@mswjs/data@0.16.1": - version "0.16.1" - resolved "https://registry.yarnpkg.com/@mswjs/data/-/data-0.16.1.tgz#ee41b95b8f2e954a07b0eb54154592a2459064d1" - integrity sha512-VhJvL/VmgAuU9/tDOcKcxHfNd+8nxYntZnrkaQEQPvZZnFwQQR9bzI1FTRROGxCHVoyfv9v84AEkl/7CIw4FAg== +"@mswjs/data@^0.16.2": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@mswjs/data/-/data-0.16.2.tgz#61d14dcb28851b25b2ca97e343d40d57870670ec" + integrity 
sha512-/C0d/PBcJyQJokUhcjO4HiZPc67hzllKlRtD1XELygl2t991/ATAAQJVcStn4YtVALsNodruzOHT0JIvgr0hnA== dependencies: "@types/lodash" "^4.14.172" "@types/md5" "^2.3.0" @@ -1543,16 +1528,16 @@ optionalDependencies: msw "^2.0.8" -"@mswjs/interceptors@^0.29.0": - version "0.29.1" - resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.29.1.tgz#e77fc58b5188569041d0440b25c9e9ebb1ccd60a" - integrity sha512-3rDakgJZ77+RiQUuSK69t1F0m8BQKA8Vh5DCS5V0DWvNY67zob2JhhQrhCO0AKLGINTRSFd1tBaHcJTkhefoSw== +"@mswjs/interceptors@^0.35.8": + version "0.35.9" + resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.35.9.tgz#1e1488ff2f333683d374eccc8c0f4d5d851c6d3d" + integrity sha512-SSnyl/4ni/2ViHKkiZb8eajA/eN1DNFaHjhGiLUdZvDz6PKF4COSf/17xqSz64nOo2Ia29SA6B2KNCsyCbVmaQ== dependencies: "@open-draft/deferred-promise" "^2.2.0" "@open-draft/logger" "^0.3.0" "@open-draft/until" "^2.0.0" is-node-process "^1.2.0" - outvariant "^1.2.1" + outvariant "^1.4.3" strict-event-emitter "^0.5.1" "@nodelib/fs.scandir@2.1.5": @@ -1602,7 +1587,7 @@ resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7" integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA== -"@octokit/core@^5.0.1", "@octokit/core@^5.0.2": +"@octokit/core@^5.0.2": version "5.2.0" resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea" integrity sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg== @@ -1632,11 +1617,6 @@ "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" -"@octokit/openapi-types@^20.0.0": - version "20.0.0" - resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-20.0.0.tgz#9ec2daa0090eeb865ee147636e0c00f73790c6e5" - integrity sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA== - 
"@octokit/openapi-types@^22.2.0": version "22.2.0" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-22.2.0.tgz#75aa7dcd440821d99def6a60b5f014207ae4968e" @@ -1654,13 +1634,6 @@ dependencies: "@octokit/types" "^13.5.0" -"@octokit/plugin-paginate-rest@^9.0.0": - version "9.2.1" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz#2e2a2f0f52c9a4b1da1a3aa17dabe3c459b9e401" - integrity sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw== - dependencies: - "@octokit/types" "^12.6.0" - "@octokit/plugin-request-log@^4.0.0": version "4.0.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz#98a3ca96e0b107380664708111864cb96551f958" @@ -1673,13 +1646,6 @@ dependencies: "@octokit/types" "^13.5.0" -"@octokit/plugin-rest-endpoint-methods@^10.0.0": - version "10.4.1" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz#41ba478a558b9f554793075b2e20cd2ef973be17" - integrity sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg== - dependencies: - "@octokit/types" "^12.6.0" - "@octokit/request-error@^5.1.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30" @@ -1716,13 +1682,6 @@ "@octokit/plugin-request-log" "^4.0.0" "@octokit/plugin-rest-endpoint-methods" "13.2.2" -"@octokit/types@^12.6.0": - version "12.6.0" - resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.6.0.tgz#8100fb9eeedfe083aae66473bd97b15b62aedcb2" - integrity sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw== - dependencies: - "@octokit/openapi-types" "^20.0.0" - "@octokit/types@^13.0.0", "@octokit/types@^13.1.0", "@octokit/types@^13.5.0": version "13.5.0" resolved 
"https://registry.yarnpkg.com/@octokit/types/-/types-13.5.0.tgz#4796e56b7b267ebc7c921dcec262b3d5bfb18883" @@ -1801,10 +1760,10 @@ ignore "^5.1.8" p-map "^4.0.0" -"@supabase/auth-js@2.64.2": - version "2.64.2" - resolved "https://registry.yarnpkg.com/@supabase/auth-js/-/auth-js-2.64.2.tgz#fe6828ed2c9844bf2e71b27f88ddfb635f24d1c1" - integrity sha512-s+lkHEdGiczDrzXJ1YWt2y3bxRi+qIUnXcgkpLSrId7yjBeaXBFygNjTaoZLG02KNcYwbuZ9qkEIqmj2hF7svw== +"@supabase/auth-js@2.65.0": + version "2.65.0" + resolved "https://registry.yarnpkg.com/@supabase/auth-js/-/auth-js-2.65.0.tgz#e345c492f8cbc31cd6289968eae0e349ff0f39e9" + integrity sha512-+wboHfZufAE2Y612OsKeVP4rVOeGZzzMLD/Ac3HrTQkkY4qXNjI6Af9gtmxwccE5nFvTiF114FEbIQ1hRq5uUw== dependencies: "@supabase/node-fetch" "^2.6.14" @@ -1822,41 +1781,41 @@ dependencies: whatwg-url "^5.0.0" -"@supabase/postgrest-js@1.15.5": - version "1.15.5" - resolved "https://registry.yarnpkg.com/@supabase/postgrest-js/-/postgrest-js-1.15.5.tgz#7fa7744cb0991328bb1a7757861e435a5477f358" - integrity sha512-YR4TiitTE2hizT7mB99Cl3V9i00RAY5sUxS2/NuWWzkreM7OeYlP2OqnqVwwb4z6ILn+j8x9e/igJDepFhjswQ== +"@supabase/postgrest-js@1.16.1": + version "1.16.1" + resolved "https://registry.yarnpkg.com/@supabase/postgrest-js/-/postgrest-js-1.16.1.tgz#68dfa0581d8ae4296378cb8815bbde3f4602aef5" + integrity sha512-EOSEZFm5pPuCPGCmLF1VOCS78DfkSz600PBuvBND/IZmMciJ1pmsS3ss6TkB6UkuvTybYiBh7gKOYyxoEO3USA== dependencies: "@supabase/node-fetch" "^2.6.14" -"@supabase/realtime-js@2.9.5": - version "2.9.5" - resolved "https://registry.yarnpkg.com/@supabase/realtime-js/-/realtime-js-2.9.5.tgz#22b7de952a7f37868ffc25d32d19f03f27bfcb40" - integrity sha512-TEHlGwNGGmKPdeMtca1lFTYCedrhTAv3nZVoSjrKQ+wkMmaERuCe57zkC5KSWFzLYkb5FVHW8Hrr+PX1DDwplQ== +"@supabase/realtime-js@2.10.2": + version "2.10.2" + resolved "https://registry.yarnpkg.com/@supabase/realtime-js/-/realtime-js-2.10.2.tgz#c2b42d17d723d2d2a9146cfad61dc3df1ce3127e" + integrity 
sha512-qyCQaNg90HmJstsvr2aJNxK2zgoKh9ZZA8oqb7UT2LCh3mj9zpa3Iwu167AuyNxsxrUE8eEJ2yH6wLCij4EApA== dependencies: "@supabase/node-fetch" "^2.6.14" "@types/phoenix" "^1.5.4" "@types/ws" "^8.5.10" ws "^8.14.2" -"@supabase/storage-js@2.6.0": - version "2.6.0" - resolved "https://registry.yarnpkg.com/@supabase/storage-js/-/storage-js-2.6.0.tgz#0fa5e04db760ed7f78e4394844a6d409e537adc5" - integrity sha512-REAxr7myf+3utMkI2oOmZ6sdplMZZ71/2NEIEMBZHL9Fkmm3/JnaOZVSRqvG4LStYj2v5WhCruCzuMn6oD/Drw== +"@supabase/storage-js@2.7.0": + version "2.7.0" + resolved "https://registry.yarnpkg.com/@supabase/storage-js/-/storage-js-2.7.0.tgz#9ff322d2c3b141087aa34115cf14205e4980ce75" + integrity sha512-iZenEdO6Mx9iTR6T7wC7sk6KKsoDPLq8rdu5VRy7+JiT1i8fnqfcOr6mfF2Eaqky9VQzhP8zZKQYjzozB65Rig== dependencies: "@supabase/node-fetch" "^2.6.14" -"@supabase/supabase-js@2.43.5": - version "2.43.5" - resolved "https://registry.yarnpkg.com/@supabase/supabase-js/-/supabase-js-2.43.5.tgz#e4d5f9e5e21ef4226e0cb013c7e51fb3c5262581" - integrity sha512-Y4GukjZWW6ouohMaPlYz8tSz9ykf9jY7w9/RhqKuScmla3Xiklce8eLr8TYAtA+oQYCWxo3RgS3B6O4rd/72FA== +"@supabase/supabase-js@^2.45.4": + version "2.45.4" + resolved "https://registry.yarnpkg.com/@supabase/supabase-js/-/supabase-js-2.45.4.tgz#0bcf8722f1732dfe3e4c5190d23e3938dcc689c3" + integrity sha512-E5p8/zOLaQ3a462MZnmnz03CrduA5ySH9hZyL03Y+QZLIOO4/Gs8Rdy4ZCKDHsN7x0xdanVEWWFN3pJFQr9/hg== dependencies: - "@supabase/auth-js" "2.64.2" + "@supabase/auth-js" "2.65.0" "@supabase/functions-js" "2.4.1" "@supabase/node-fetch" "2.6.15" - "@supabase/postgrest-js" "1.15.5" - "@supabase/realtime-js" "2.9.5" - "@supabase/storage-js" "2.6.0" + "@supabase/postgrest-js" "1.16.1" + "@supabase/realtime-js" "2.10.2" + "@supabase/storage-js" "2.7.0" "@types/babel__core@^7.1.14": version "7.20.5" @@ -1929,10 +1888,18 @@ dependencies: "@types/istanbul-lib-report" "*" +"@types/jest@^29.5.12": + version "29.5.12" + resolved 
"https://registry.yarnpkg.com/@types/jest/-/jest-29.5.12.tgz#7f7dc6eb4cf246d2474ed78744b05d06ce025544" + integrity sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + "@types/lodash@^4.14.172": - version "4.17.4" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.4.tgz#0303b64958ee070059e3a7184048a55159fe20b7" - integrity sha512-wYCP26ZLxaT3R39kiN2+HcJ4kTd3U1waI/cY7ivWYqFP6pW3ZNpvi6Wd6PHZx7T/t8z0vlkXMg3QYLa7DZ/IJQ== + version "4.17.10" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.10.tgz#64f3edf656af2fe59e7278b73d3e62404144a6e6" + integrity sha512-YpS0zzoduEhuOWjAotS6A5AVCva7X4lVlYLF0FYHAY9sdraBfnatttHItlWeZdGhuEkf+OzMNg2ZYAx8t+52uQ== "@types/md5@^2.3.0": version "2.3.5" @@ -1946,6 +1913,14 @@ dependencies: "@types/node" "*" +"@types/node-fetch@^2.6.4": + version "2.6.11" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" + integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== + dependencies: + "@types/node" "*" + form-data "^4.0.0" + "@types/node-forge@^1.3.0": version "1.3.11" resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da" @@ -1953,7 +1928,7 @@ dependencies: "@types/node" "*" -"@types/node@*", "@types/node@^20.12.13": +"@types/node@*": version "20.13.0" resolved "https://registry.yarnpkg.com/@types/node/-/node-20.13.0.tgz#011a76bc1e71ae9a026dddcfd7039084f752c4b6" integrity sha512-FM6AOb3khNkNIXPnHFDYaHerSv8uN22C91z098AnGccVu+Pcdhi+pNUFDi0iLmPIsVE0JBD0KVS7mzUYt4nRzQ== @@ -1967,10 +1942,24 @@ dependencies: undici-types "~5.26.4" +"@types/node@^18.11.18": + version "18.19.39" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.39.tgz#c316340a5b4adca3aee9dcbf05de385978590593" + integrity 
sha512-nPwTRDKUctxw3di5b4TfT3I0sWDiWoPQCZjXhvdkINntwr8lcoVCKsTgnXeRubKIlfnV+eN/HYk6Jb40tbcEAQ== + dependencies: + undici-types "~5.26.4" + +"@types/node@^22.5.5": + version "22.7.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.7.5.tgz#cfde981727a7ab3611a481510b473ae54442b92b" + integrity sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ== + dependencies: + undici-types "~6.19.2" + "@types/phoenix@^1.5.4": - version "1.6.4" - resolved "https://registry.yarnpkg.com/@types/phoenix/-/phoenix-1.6.4.tgz#cceac93a827555473ad38057d1df7d06eef1ed71" - integrity sha512-B34A7uot1Cv0XtaHRYDATltAdKx0BvVKNgYNqE4WjtPUa4VQJM7kxeXcVKaH+KS+kCmZ+6w+QaUdcljiheiBJA== + version "1.6.5" + resolved "https://registry.yarnpkg.com/@types/phoenix/-/phoenix-1.6.5.tgz#5654e14ec7ad25334a157a20015996b6d7d2075e" + integrity sha512-xegpDuR+z0UqG9fwHqNoy3rI7JDlvaPh2TY47Fl80oq6g+hXT+c/LEuE43X48clZ6lOfANl5WrPur9fYO1RJ/w== "@types/pluralize@^0.0.29": version "0.0.29" @@ -1987,6 +1976,11 @@ resolved "https://registry.yarnpkg.com/@types/statuses/-/statuses-2.0.5.tgz#f61ab46d5352fd73c863a1ea4e1cef3b0b51ae63" integrity sha512-jmIUGWrAiwu3dZpxntxieC+1n/5c3mjrImkmOSQ2NC5uP6cYO4aAZDdSmRcI5C1oiTmqlZGHC+/NmJrKogbP5A== +"@types/tough-cookie@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.5.tgz#cb6e2a691b70cb177c6e3ae9c1d2e8b2ea8cd304" + integrity sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA== + "@types/uuid@^8.3.0": version "8.3.4" resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.4.tgz#bd86a43617df0594787d38b735f55c805becf1bc" @@ -1998,9 +1992,9 @@ integrity sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g== "@types/ws@^8.5.10": - version "8.5.10" - resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.10.tgz#4acfb517970853fa6574a3a6886791d04a396787" - integrity 
sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A== + version "8.5.12" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.12.tgz#619475fe98f35ccca2a2f6c137702d85ec247b7e" + integrity sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ== dependencies: "@types/node" "*" @@ -2097,6 +2091,11 @@ "@typescript-eslint/types" "7.13.1" eslint-visitor-keys "^3.4.3" +"@ubiquity-dao/ubiquibot-logger@^1.3.0": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@ubiquity-dao/ubiquibot-logger/-/ubiquibot-logger-1.3.1.tgz#c3f45d70014dcc2551442c28101046e1c8ea6886" + integrity sha512-kDLnVP87Y3yZV6NnqIEDAOz+92IW0nIcccML2lUn93uZ5ada78vfdTPtwPJo8tkXl1Z9qMKAqqHkwBMp1Ksnag== + JSONStream@^1.3.5: version "1.3.5" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" @@ -2105,6 +2104,13 @@ JSONStream@^1.3.5: jsonparse "^1.2.0" through ">=2.2.7 <3" +abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== + dependencies: + event-target-shim "^5.0.0" + acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" @@ -2125,6 +2131,13 @@ acorn@^8.8.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== +agentkeepalive@^4.2.1: + version "4.5.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" + integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== + 
dependencies: + humanize-ms "^1.2.1" + aggregate-error@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" @@ -2288,6 +2301,11 @@ async-lock@^1.4.1: resolved "https://registry.yarnpkg.com/async-lock/-/async-lock-1.4.1.tgz#56b8718915a9b68b10fce2f2a9a3dddf765ef53f" integrity sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ== +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + available-typed-arrays@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz#a5cc375d6a03c2efc87a553f3e0b1522def14846" @@ -2365,6 +2383,11 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + before-after-hook@^2.2.0: version "2.2.3" resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" @@ -2431,6 +2454,14 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer@^6.0.3: + version "6.0.3" + resolved 
"https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.2.1" + call-bind@^1.0.2, call-bind@^1.0.5, call-bind@^1.0.6, call-bind@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" @@ -2566,11 +2597,6 @@ cli-cursor@^4.0.0: dependencies: restore-cursor "^4.0.0" -cli-spinners@^2.9.2: - version "2.9.2" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" - integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== - cli-truncate@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-4.0.0.tgz#6cc28a2924fee9e25ce91e973db56c7066e6172a" @@ -2637,6 +2663,13 @@ colorette@^2.0.20: resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + commander@^12.1.0, commander@~12.1.0: version "12.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" @@ -2998,6 +3031,11 @@ defu@^6.1.4: resolved "https://registry.yarnpkg.com/defu/-/defu-6.1.4.tgz#4e0c9cf9ff68fe5f3d7f2765cc1a012dfdcb0479" integrity sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg== +delayed-stream@~1.0.0: + 
version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + deprecation@^2.0.0: version "2.3.1" resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" @@ -3032,7 +3070,7 @@ dot-prop@^5.1.0: dependencies: is-obj "^2.0.0" -dotenv@16.4.5: +dotenv@^16.4.5: version "16.4.5" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== @@ -3381,11 +3419,21 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== +event-target-shim@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== + eventemitter3@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== +events@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" @@ -3426,7 +3474,7 @@ exit@^0.1.2: resolved 
"https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== -expect@^29.7.0: +expect@^29.0.0, expect@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== @@ -3556,6 +3604,33 @@ for-each@^0.3.3: dependencies: is-callable "^1.1.3" +form-data-encoder@1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" + integrity sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A== + +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +formdata-node@^4.3.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-4.4.1.tgz#23f6a5cb9cb55315912cbec4ff7b0f59bbd191e2" + integrity sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ== + dependencies: + node-domexception "1.0.0" + web-streams-polyfill "4.0.0-beta.3" + +formdata-node@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-6.0.3.tgz#48f8e2206ae2befded82af621ef015f08168dc6d" + integrity sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg== + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -3655,9 +3730,9 @@ 
get-symbol-description@^1.0.2: get-intrinsic "^1.2.4" get-tsconfig@^4.7.5: - version "4.7.5" - resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.5.tgz#5e012498579e9a6947511ed0cd403272c7acbbaf" - integrity sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw== + version "4.8.1" + resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.8.1.tgz#8995eb391ae6e1638d251118c7b56de7eb425471" + integrity sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg== dependencies: resolve-pkg-maps "^1.0.0" @@ -3756,9 +3831,9 @@ graphemer@^1.4.0: integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== graphql@^16.8.1: - version "16.8.1" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.8.1.tgz#1930a965bef1170603702acdb68aedd3f3cf6f07" - integrity sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw== + version "16.9.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.9.0.tgz#1c310e63f16a49ce1fbb230bd0a000e99f6f115f" + integrity sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw== has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" @@ -3841,6 +3916,13 @@ human-signals@^5.0.0: resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28" integrity sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ== +humanize-ms@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" + integrity sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ== + dependencies: + ms "^2.0.0" + husky@9.0.11: version "9.0.11" resolved 
"https://registry.yarnpkg.com/husky/-/husky-9.0.11.tgz#fc91df4c756050de41b3e478b2158b87c1e79af9" @@ -3851,6 +3933,11 @@ identity-function@^1.0.0: resolved "https://registry.yarnpkg.com/identity-function/-/identity-function-1.0.0.tgz#bea1159f0985239be3ca348edf40ce2f0dd2c21d" integrity sha512-kNrgUK0qI+9qLTBidsH85HjDLpZfrrS0ElquKKe/fJFdB3D7VeKdXXEvOPDUHSHOzdZKCAAaQIWWyp0l2yq6pw== +ieee754@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + ignore@^5.1.4, ignore@^5.1.8, ignore@^5.2.0, ignore@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" @@ -4591,6 +4678,11 @@ jiti@^1.21.0: resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.6.tgz#6c7f7398dd4b3142767f9a168af2f317a428d268" integrity sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w== +js-base64@3.7.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-3.7.2.tgz#816d11d81a8aff241603d19ce5761e13e41d7745" + integrity sha512-NnRs6dsyqUXejqk/yv2aiXlAvOs56sLkX6nUdeaNezI5LFFLlsZjOThmwnrcwh5ZZRwZlCMnVAY3CvhIhoVEKQ== + js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" @@ -4928,6 +5020,18 @@ micromatch@^4.0.4, micromatch@^4.0.5, micromatch@^4.0.7, micromatch@~4.0.7: braces "^3.0.3" picomatch "^2.3.1" +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved 
"https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + mime@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" @@ -5002,16 +5106,21 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== +ms@^2.0.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + msw@^2.0.8: - version "2.3.1" - resolved "https://registry.yarnpkg.com/msw/-/msw-2.3.1.tgz#bfc73e256ffc2c74ec4381b604abb258df35f32b" - integrity sha512-ocgvBCLn/5l3jpl1lssIb3cniuACJLoOfZu01e3n5dbJrpA5PeeWn28jCLgQDNt6d7QT8tF2fYRzm9JoEHtiig== + version "2.4.10" + resolved "https://registry.yarnpkg.com/msw/-/msw-2.4.10.tgz#148d809f8e9dfd3c7d0abb93c1641bfaea877b9e" + integrity sha512-bDQh9b25JK4IKMs5hnamwAkcNZ9RwA4mR/4YcgWkzwHOxj7UICbVJfmChJvY1UCAAMraPpvjHdxjoUDpc3F+Qw== dependencies: "@bundled-es-modules/cookie" "^2.0.0" "@bundled-es-modules/statuses" "^1.0.1" + "@bundled-es-modules/tough-cookie" "^0.1.6" "@inquirer/confirm" "^3.0.0" - "@mswjs/cookies" "^1.1.0" - "@mswjs/interceptors" "^0.29.0" + "@mswjs/interceptors" "^0.35.8" "@open-draft/until" "^2.1.0" "@types/cookie" "^0.6.0" "@types/statuses" "^2.0.4" @@ -5020,7 +5129,7 @@ msw@^2.0.8: headers-polyfill "^4.0.2" is-node-process "^1.2.0" outvariant "^1.4.2" - path-to-regexp "^6.2.0" + path-to-regexp "^6.3.0" strict-event-emitter "^0.5.1" type-fest "^4.9.0" yargs "^17.7.2" @@ -5050,11 +5159,23 @@ nice-try@^1.0.4: resolved 
"https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== +node-domexception@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" + integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== + node-fetch-native@^1.6.4: version "1.6.4" resolved "https://registry.yarnpkg.com/node-fetch-native/-/node-fetch-native-1.6.4.tgz#679fc8fd8111266d47d7e72c379f1bed9acff06e" integrity sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ== +node-fetch@2.7.0, node-fetch@^2.6.7: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + node-forge@^1: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -5165,6 +5286,19 @@ onetime@^6.0.0: dependencies: mimic-fn "^4.0.0" +openai@^4.63.0: + version "4.63.0" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.63.0.tgz#cabe7223788157c96c818317cc361386807157f7" + integrity sha512-Y9V4KODbmrOpqiOmCDVnPfMxMqKLOx8Hwcdn/r8mePq4yv7FSXGnxCs8/jZKO7zCB/IVPWihpJXwJNAIOEiZ2g== + dependencies: + "@types/node" "^18.11.18" + "@types/node-fetch" "^2.6.4" + abort-controller "^3.0.0" + agentkeepalive "^4.2.1" + form-data-encoder "1.7.2" + formdata-node "^4.3.2" + node-fetch "^2.6.7" + optionator@^0.9.3: version "0.9.4" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" @@ -5177,10 +5311,10 @@ optionator@^0.9.3: type-check "^0.4.0" word-wrap "^1.2.5" 
-outvariant@^1.2.1, outvariant@^1.4.0, outvariant@^1.4.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.2.tgz#f54f19240eeb7f15b28263d5147405752d8e2066" - integrity sha512-Ou3dJ6bA/UJ5GVHxah4LnqDwZRwAmWxrG3wtrHrbGnP4RnLCtA64A4F+ae7Y8ww660JaddSoArUR5HjipWSHAQ== +outvariant@^1.2.1, outvariant@^1.4.0, outvariant@^1.4.2, outvariant@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.3.tgz#221c1bfc093e8fec7075497e7799fdbf43d14873" + integrity sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA== p-limit@^2.2.0: version "2.3.0" @@ -5318,6 +5452,11 @@ path-to-regexp@^6.2.0: resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.2.tgz#324377a83e5049cbecadc5554d6a63a9a4866b36" integrity sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw== +path-to-regexp@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4" + integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ== + path-type@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" @@ -5409,7 +5548,7 @@ prettier@3.3.2: resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.3.2.tgz#03ff86dc7c835f2d2559ee76876a3914cec4a90a" integrity sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA== -pretty-format@^29.7.0: +pretty-format@^29.0.0, pretty-format@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== @@ -5430,6 +5569,11 @@ printable-characters@^1.0.42: resolved 
"https://registry.yarnpkg.com/printable-characters/-/printable-characters-1.0.42.tgz#3f18e977a9bd8eb37fcc4ff5659d7be90868b3d8" integrity sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ== +process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== + prompts@^2.0.1: version "2.4.2" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" @@ -5438,7 +5582,12 @@ prompts@^2.0.1: kleur "^3.0.3" sisteransi "^1.0.5" -punycode@^2.1.0: +psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^2.1.0, punycode@^2.1.1: version "2.3.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== @@ -5448,6 +5597,18 @@ pure-rand@^6.0.0: resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.1.0.tgz#d173cf23258231976ccbdb05247c9787957604f2" integrity sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA== +qs@6.11.2: + version "6.11.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.2.tgz#64bea51f12c1f5da1bc01496f48ffcff7c69d7d9" + integrity sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA== + dependencies: + side-channel "^1.0.4" + +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity 
sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -5476,6 +5637,17 @@ readable-stream@^3.4.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^4.5.2: + version "4.5.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.5.2.tgz#9e7fc4c45099baeed934bff6eb97ba6cf2729e09" + integrity sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g== + dependencies: + abort-controller "^3.0.0" + buffer "^6.0.3" + events "^3.3.0" + process "^0.11.10" + string_decoder "^1.3.0" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -5513,6 +5685,11 @@ require-from-string@^2.0.2: resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" @@ -5936,7 +6113,7 @@ string.prototype.trimstart@^1.0.8: define-properties "^1.2.1" es-object-atoms "^1.0.0" -string_decoder@^1.1.1: +string_decoder@^1.1.1, string_decoder@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity 
sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== @@ -6086,6 +6263,16 @@ to-space-case@^1.0.0: dependencies: to-no-case "^1.0.0" +tough-cookie@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.4.tgz#945f1461b45b5a8c76821c33ea49c3ac192c1b36" + integrity sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" @@ -6134,11 +6321,6 @@ tsx@4.15.6: optionalDependencies: fsevents "~2.3.3" -tunnel@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" - integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== - type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" @@ -6157,9 +6339,9 @@ type-fest@^0.21.3: integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^4.9.0: - version "4.18.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.18.3.tgz#5249f96e7c2c3f0f1561625f54050e343f1c8f68" - integrity sha512-Q08/0IrpvM+NMY9PA2rti9Jb+JejTddwmwmVQGskAlhtcrw1wsRzoR6ode6mR+OAabNa75w/dxedSUY2mlphaQ== + version "4.26.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.26.1.tgz#a4a17fa314f976dd3e6d6675ef6c775c16d7955e" + integrity sha512-yOGpmOAL7CkKe/91I5O3gPICmJNLJ1G4zFYVAsRHg7M64biSnPtRj0WNQt++bRkjYOqjWXrhnUw1utzmVErAdg== typebox-validators@0.3.5: version "0.3.5" @@ -6244,7 +6426,12 @@ undici-types@~5.26.4: resolved 
"https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== -undici@^5.25.4, undici@^5.28.2: +undici-types@~6.19.2: + version "6.19.8" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" + integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== + +undici@^5.28.2: version "5.28.4" resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" integrity sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g== @@ -6280,6 +6467,11 @@ universal-user-agent@^6.0.0: resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + update-browserslist-db@^1.0.13: version "1.0.16" resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz#f6d489ed90fb2f07d67784eb3f53d7891f736356" @@ -6295,6 +6487,19 @@ uri-js@^4.2.2, uri-js@^4.4.1: dependencies: punycode "^2.1.0" +url-join@4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7" + integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA== + +url-parse@^1.5.3: + version "1.5.10" + resolved 
"https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -6327,6 +6532,19 @@ vlq@^0.2.1: resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow== +voyageai@^0.0.1-5: + version "0.0.1-5" + resolved "https://registry.yarnpkg.com/voyageai/-/voyageai-0.0.1-5.tgz#e0457d991784900c16e4cdf095654f195d62fdf2" + integrity sha512-IuXSXM3l9J3NIq+MLHXacG/yhswpEgWIu9eBqoFqMRnFiDx00dLL62OWg6WqVSipddZLwFeWH1Kaj56x5eqhOQ== + dependencies: + form-data "^4.0.0" + formdata-node "^6.0.3" + js-base64 "3.7.2" + node-fetch "2.7.0" + qs "6.11.2" + readable-stream "^4.5.2" + url-join "4.0.1" + vscode-languageserver-textdocument@^1.0.11: version "1.0.11" resolved "https://registry.yarnpkg.com/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.11.tgz#0822a000e7d4dc083312580d7575fe9e3ba2e2bf" @@ -6351,6 +6569,11 @@ wcwidth@^1.0.1: dependencies: defaults "^1.0.3" +web-streams-polyfill@4.0.0-beta.3: + version "4.0.0-beta.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" + integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" @@ -6479,11 +6702,16 @@ write-file-atomic@^4.0.2: imurmurhash "^0.1.4" signal-exit "^3.0.7" 
-ws@^8.11.0, ws@^8.14.2: +ws@^8.11.0: version "8.17.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.0.tgz#d145d18eca2ed25aaf791a183903f7be5e295fea" integrity sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow== +ws@^8.14.2: + version "8.18.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" + integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== + xdg-basedir@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-5.1.0.tgz#1efba19425e73be1bc6f2a6ceb52a3d2c884c0c9" @@ -6547,6 +6775,11 @@ yocto-queue@^1.0.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== +yoctocolors-cjs@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz#f4b905a840a37506813a7acaa28febe97767a242" + integrity sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA== + youch@^3.2.2: version "3.3.3" resolved "https://registry.yarnpkg.com/youch/-/youch-3.3.3.tgz#50cfdf5bc395ce664a5073e31b712ff4a859d928"