From 61799d01e59658bc5241c3decaedfda310037adb Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 23 Oct 2024 23:00:43 +0100 Subject: [PATCH 01/59] chore: use default wrangler dev port, init handler --- package.json | 4 ++-- src/handlers/pull-precheck.ts | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 src/handlers/pull-precheck.ts diff --git a/package.json b/package.json index 94d1357..2b159aa 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ "knip-ci": "knip --no-exit-code --reporter json --config .github/knip.ts", "prepare": "husky install", "test": "jest --setupFiles dotenv/config --coverage", - "worker": "wrangler dev --env dev --port 5000" + "worker": "wrangler dev --env dev --port 4000" }, "keywords": [ "typescript", @@ -84,4 +84,4 @@ "@commitlint/config-conventional" ] } -} +} \ No newline at end of file diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts new file mode 100644 index 0000000..dee86b5 --- /dev/null +++ b/src/handlers/pull-precheck.ts @@ -0,0 +1,6 @@ +import { Context } from "../types"; + +export async function performPullPrecheck(context: Context) { + + +} \ No newline at end of file From 86e78522f162cd789a30cfb989c35ff7a96e0468 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 23 Oct 2024 23:01:33 +0100 Subject: [PATCH 02/59] chore: remove .d.ts from manually written type file --- src/types/{llm.d.ts => llm.ts} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/types/{llm.d.ts => llm.ts} (100%) diff --git a/src/types/llm.d.ts b/src/types/llm.ts similarity index 100% rename from src/types/llm.d.ts rename to src/types/llm.ts From 9001723ea7fcf1c9772fa2d29bc1b5a999c41b41 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 23 Oct 2024 23:08:28 +0100 Subject: [PATCH 03/59] chore: add new supported events, type handler --- .cspell.json | 3 ++- .github/workflows/compute.yml | 10 +++++----- .github/workflows/update-configuration.yml | 2 +- manifest.json | 2 +- package.json | 2 +- src/handlers/pull-precheck.ts | 9 ++++----- src/types/context.ts | 2 +- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.cspell.json b/.cspell.json index bbe91d8..41d23eb 100644 --- a/.cspell.json +++ b/.cspell.json @@ -30,7 +30,8 @@ "mixtral", "nemo", "Reranking", - "mistralai" + "mistralai", + "Precheck" ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], diff --git a/.github/workflows/compute.yml b/.github/workflows/compute.yml index 285665e..533c5ec 100644 --- a/.github/workflows/compute.yml +++ b/.github/workflows/compute.yml @@ -45,8 +45,8 @@ jobs: run: yarn tsx ./src/main.ts id: command-ask env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL }} - SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} - VOYAGEAI_API_KEY: ${{ secrets.VOYAGEAI_API_KEY }} - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - UBIQUITY_OS_APP_NAME: ${{ secrets.UBIQUITY_OS_APP_NAME }} \ No newline at end of file + SUPABASE_URL: ${{ secrets.SUPABASE_URL }} + SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} + VOYAGEAI_API_KEY: ${{ secrets.VOYAGEAI_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + UBIQUITY_OS_APP_NAME: ${{ secrets.UBIQUITY_OS_APP_NAME }} diff --git a/.github/workflows/update-configuration.yml b/.github/workflows/update-configuration.yml index 2d366d6..b92a487 100644 --- 
a/.github/workflows/update-configuration.yml +++ b/.github/workflows/update-configuration.yml @@ -18,4 +18,4 @@ jobs: commitMessage: "chore: updated manifest.json and dist build" nodeVersion: "20.10.0" env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/manifest.json b/manifest.json index 5d6ce58..5c92a0c 100644 --- a/manifest.json +++ b/manifest.json @@ -1,5 +1,5 @@ { "name": "command-ask", "description": "A highly context aware organization integrated chatbot", - "ubiquity:listeners": ["issue_comment.created"] + "ubiquity:listeners": ["issue_comment.created", "pull_request.opened", "pull_request.ready_for_review"] } diff --git a/package.json b/package.json index 2b159aa..32061fa 100644 --- a/package.json +++ b/package.json @@ -84,4 +84,4 @@ "@commitlint/config-conventional" ] } -} \ No newline at end of file +} diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index dee86b5..16f5b46 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -1,6 +1,5 @@ -import { Context } from "../types"; +import { Context, SupportedEvents } from "../types"; -export async function performPullPrecheck(context: Context) { - - -} \ No newline at end of file +export async function performPullPrecheck( + context: Context<"pull_request.opened" | "pull_request.ready_for_review", SupportedEvents["pull_request.opened" | "pull_request.ready_for_review"]> +) {} diff --git a/src/types/context.ts b/src/types/context.ts index 73f74b7..6998ddd 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -5,7 +5,7 @@ import { Logs } from "@ubiquity-dao/ubiquibot-logger"; import { Env } from "./env"; import { createAdapters } from "../adapters"; -export type SupportedEventsU = "issue_comment.created"; +export type SupportedEventsU = "issue_comment.created" | "pull_request.opened" | "pull_request.ready_for_review"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; From 53d9a9665046ce42214c5ce536fba34a9a6046dd Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 23 Oct 2024 23:14:46 +0100 Subject: [PATCH 04/59] chore: gitignore temp payloads --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 12274bf..c3dec92 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ cypress/screenshots script.ts .wrangler test-dashboard.md +payloads.json \ No newline at end of file From 65a3a364ce2158611a03f5ac89a484208e36b366 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 01:02:51 +0100 Subject: [PATCH 05/59] chore: submitCodeReview handler --- src/helpers/pull-requests.ts | 24 ++++++++++++++++++++++++ src/types/pull-requests.ts | 1 + 2 files changed, 25 insertions(+) create mode 100644 src/helpers/pull-requests.ts create mode 100644 src/types/pull-requests.ts diff --git a/src/helpers/pull-requests.ts b/src/helpers/pull-requests.ts new file mode 100644 index 0000000..b26e728 --- /dev/null +++ b/src/helpers/pull-requests.ts @@ -0,0 +1,24 @@ +import { Context } from "../types"; +import { CodeReviewStatus } from "../types/pull-requests"; + +export async function submitCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">, review: string, status: CodeReviewStatus) { + const { logger, payload } = context + const { number, sender, organization, repository, action } = payload + const { owner, name } = repository + + logger.info(`${organization}/${repository}#${number} - ${action} - ${sender.login} - ${review}`); + + try { + const response = await context.octokit.pulls.createReview({ + owner: owner.login, + repo: name, + pull_number: number, + body: review, + event: status + }); + + logger.info(`Code review submitted: ${response.data.html_url}`); + } catch (er) { + throw logger.error("Failed to submit code review", { err: er }); + } +} \ No newline at end of file diff --git a/src/types/pull-requests.ts b/src/types/pull-requests.ts new file mode 100644 index 0000000..9b176b3 --- /dev/null +++ b/src/types/pull-requests.ts @@ -0,0 +1 @@ +export type CodeReviewStatus = "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; From f02dafada24d838149a0ac1112facb53ce904d56 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 01:21:46 +0100 Subject: [PATCH 06/59] chore: gql for task relations --- package.json | 1 + src/handlers/pull-precheck.ts | 29 +++++++++++++++++++++++- src/helpers/callback-proxy.ts | 3 +++ src/helpers/gql-functions.ts | 29 ++++++++++++++++++++++++ src/helpers/gql-queries.ts | 42 +++++++++++++++++++++++++++++++++++ yarn.lock | 22 +++++++++++++++++- 6 files changed, 124 insertions(+), 2 deletions(-) create mode 100644 src/helpers/gql-functions.ts create mode 100644 src/helpers/gql-queries.ts diff --git a/package.json b/package.json index 32061fa..9ea0fba 100644 --- a/package.json +++ b/package.json @@ -28,6 +28,7 @@ ], "dependencies": { "@mswjs/data": "^0.16.2", + "@octokit/graphql-schema": "^15.25.0", "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index 16f5b46..8e5a880 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -1,5 +1,32 @@ import { Context, SupportedEvents } from "../types"; +import { CallbackResult } from "../types/proxy"; +/** + +Contributor must open as draft first 
then ready it for review. +Context is: issue spec and PR diff +output: what's missing compared to the spec, review as requested changes and convert to draft. Pass = commented status. +conditions: +- collaborator converts the PR, bot should not interact again +- one review per day + */ export async function performPullPrecheck( context: Context<"pull_request.opened" | "pull_request.ready_for_review", SupportedEvents["pull_request.opened" | "pull_request.ready_for_review"]> -) {} +): Promise { + const { logger, payload, eventName } = context + const { pull_request } = payload + + if (pull_request.draft) { + return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; + } + + // fetch the Task spec + + + + + + + + return { status: 200, reason: "success" }; +} diff --git a/src/helpers/callback-proxy.ts b/src/helpers/callback-proxy.ts index a50da5e..d001983 100644 --- a/src/helpers/callback-proxy.ts +++ b/src/helpers/callback-proxy.ts @@ -1,4 +1,5 @@ import { issueCommentCreatedCallback } from "../handlers/comment-created-callback"; +import { performPullPrecheck } from "../handlers/pull-precheck"; import { Context, SupportedEventsU } from "../types"; import { ProxyCallbacks } from "../types/proxy"; import { bubbleUpErrorComment } from "./errors"; @@ -12,6 +13,8 @@ import { bubbleUpErrorComment } from "./errors"; */ const callbacks = { "issue_comment.created": [issueCommentCreatedCallback], + "pull_request.opened": [performPullPrecheck], + "pull_request.ready_for_review": [performPullPrecheck], } as ProxyCallbacks; /** diff --git a/src/helpers/gql-functions.ts b/src/helpers/gql-functions.ts new file mode 100644 index 0000000..59572de --- /dev/null +++ b/src/helpers/gql-functions.ts @@ -0,0 +1,29 @@ +import { Octokit } from "@octokit/rest"; +import { closedByPullRequestsReferences, IssueLinkedToPr } from "./gql-queries"; + +export async function collectIssuesToBeClosedByThisPr( + octokit: Octokit, + issue: { + owner: string; + repo: string; + issue_number: number; + } +) { + const { owner, repo, issue_number } = issue; + + if (!issue_number) { + throw new Error("[collectIssuesToBeClosedByThisPr]: issue_number is required"); + } + try { + const result = await octokit.graphql(closedByPullRequestsReferences, { + owner, + repo, + issue_number, + }); + + return result.repository.issue.closedByPullRequestsReferences.edges.map((edge) => edge.node); + } catch { + // probably not found/deleted + return []; + } +} diff --git a/src/helpers/gql-queries.ts b/src/helpers/gql-queries.ts new file mode 100644 index 0000000..9c5e89d --- /dev/null +++ b/src/helpers/gql-queries.ts @@ -0,0 +1,42 @@ +import { User, PullRequest } from "@octokit/graphql-schema"; + +type ClosedByPullRequestsReferences = { + node: Pick & Pick; +}; + +export type IssueLinkedToPr = { + repository: { + issue: { + closedByPullRequestsReferences: { + edges: ClosedByPullRequestsReferences[]; + }; + }; + }; +}; + + +export const closedByPullRequestsReferences = /* GraphQL */ ` + query collectLinkedPullRequests($owner: String!, $repo: String!, $issue_number: Int!) { + repository(owner: $owner, name: $repo) { + issue(number: $issue_number) { + closedByPullRequestsReferences(first: 100, includeClosedPrs: true) { + edges { + node { + url + title + body + state + number + author { + login + ... 
on User { + id: databaseId + } + } + } + } + } + } + } + } +`; diff --git a/yarn.lock b/yarn.lock index 8dccaee..e149d26 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1654,6 +1654,14 @@ "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" +"@octokit/graphql-schema@^15.25.0": + version "15.25.0" + resolved "https://registry.yarnpkg.com/@octokit/graphql-schema/-/graphql-schema-15.25.0.tgz#30bb8ecc494c249650991b33f2f0d9332dbe87e9" + integrity sha512-aqz9WECtdxVWSqgKroUu9uu+CRt5KnfErWs0dBPKlTdrreAeWzS5NRu22ZVcGdPP7s3XDg2Gnf5iyoZPCRZWmQ== + dependencies: + graphql "^16.0.0" + graphql-tag "^2.10.3" + "@octokit/graphql@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.1.0.tgz#9bc1c5de92f026648131f04101cab949eeffe4e0" @@ -3897,7 +3905,14 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -graphql@^16.8.1: +graphql-tag@^2.10.3: + version "2.12.6" + resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.6.tgz#d441a569c1d2537ef10ca3d1633b48725329b5f1" + integrity sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg== + dependencies: + tslib "^2.1.0" + +graphql@^16.0.0, graphql@^16.8.1: version "16.9.0" resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.9.0.tgz#1c310e63f16a49ce1fbb230bd0a000e99f6f115f" integrity sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw== @@ -6368,6 +6383,11 @@ tsconfig-paths@^4.2.0: minimist "^1.2.6" strip-bom "^3.0.0" +tslib@^2.1.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.0.tgz#d124c86c3c05a40a91e6fdea4021bd31d377971b" + integrity sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA== + tslib@^2.2.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" From f81eca0f07791222be5825d3e682f89269d7b83c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:05:58 +0100 Subject: [PATCH 07/59] chore: create sys msg fn, use array joins, update type name --- src/adapters/openai/helpers/completions.ts | 37 ++++++++++++++-------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index f68f305..25768bc 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -3,7 +3,7 @@ import { Context } from "../../../types"; import { SuperOpenAi } from "./openai"; const MAX_TOKENS = 7000; -export interface CompletionsType { +export interface ResponseFromLlm { answer: string; tokenUsage: { input: number; @@ -20,14 +20,34 @@ export class Completions extends SuperOpenAi { this.context = context; } + private _createSystemMessage(systemMessage: string, additionalContext: string[], localContext: string[], groundTruths: string[], botName: string) { + // safer to use array join than string concatenation + const parts = [ + "You Must obey the following ground truths: [", + groundTruths.join(":"), + "]\n", + systemMessage, + "Your name is : ", + botName, + "\n", + "Primary Context: ", + additionalContext.join("\n"), + "\nLocal Context: ", + localContext.join("\n"), + ]; + + return parts.join("\n"); + } + async 
createCompletion( + systemMessage: string, prompt: string, model: string = "o1-mini", additionalContext: string[], localContext: string[], groundTruths: string[], botName: string - ): Promise { + ): Promise { const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ model: model, messages: [ @@ -36,18 +56,7 @@ export class Completions extends SuperOpenAi { content: [ { type: "text", - text: - "You Must obey the following ground truths: [" + - groundTruths.join(":") + - "]\n" + - "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." 
+ - "Your name is : " + - botName + - "\n" + - "Primary Context: " + - additionalContext.join("\n") + - "\nLocal Context: " + - localContext.join("\n"), + text: this._createSystemMessage(systemMessage, additionalContext, localContext, groundTruths, botName), }, ], }, From 89cbd3f840cadb8c39f330185656ce903b5fa9d9 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:08:11 +0100 Subject: [PATCH 08/59] chore: improve sys msg readability, move to own file --- src/adapters/openai/helpers/prompts.ts | 32 ++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 src/adapters/openai/helpers/prompts.ts diff --git a/src/adapters/openai/helpers/prompts.ts b/src/adapters/openai/helpers/prompts.ts new file mode 100644 index 0000000..d40d733 --- /dev/null +++ b/src/adapters/openai/helpers/prompts.ts @@ -0,0 +1,32 @@ +export const DEFAULT_SYSTEM_MESSAGE = `You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions. + +# Steps + +1. **Understand Context**: Review the chat history and any similar provided responses to understand the context. +2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus. +3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response. +4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query. +5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary. + +# Output Format + +- Concise and coherent responses in paragraphs that directly address the user's question. +- Incorporate inline code snippets or references from the documentation if relevant. + +# Examples + +**Example 1** + +*Input:* +- Chat History: "What was the original reason for moving the LP tokens?" +- Corpus Excerpts: "It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not." + +*Output:* +"It was due to missing LP tokens issue from the MasterChefV2.1 Contract. + +# Notes + +- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query. 
+- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.`; + +export const PULL_PRECHECK_SYSTEM_MESSAGE = `Perform code review using the diff and spec.` \ No newline at end of file From eae71c5458277fafbdf1e4acd03acd8000f98fe8 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:09:04 +0100 Subject: [PATCH 09/59] chore: default sys msg and type import --- src/handlers/ask-llm.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index 30112c3..7e6a891 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -1,10 +1,11 @@ import { Context } from "../types"; -import { CompletionsType } from "../adapters/openai/helpers/completions"; +import { ResponseFromLlm } from "../adapters/openai/helpers/completions"; import { CommentSimilaritySearchResult } from "../adapters/supabase/helpers/comment"; import { IssueSimilaritySearchResult } from "../adapters/supabase/helpers/issues"; import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; import { formatChatHistory } from "../helpers/format-chat-history"; import { optimizeContext } from "../helpers/issue"; +import { DEFAULT_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; /** * Asks a question to GPT and returns the response @@ -34,7 +35,7 @@ export async function askQuestion(context: Context, question: string) { * @param formattedChat - The formatted chat history to provide context to GPT * @returns completions - The completions generated by GPT **/ -export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { +export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { const { env: { UBIQUITY_OS_APP_NAME }, config: { model, similarityThreshold }, @@ -63,6 +64,7 @@ export async function askGpt(context: Context, question: string, formattedChat: similarText = similarText.filter((text) => text !== ""); const rerankedText = similarText.length > 0 ? 
await context.adapters.voyage.reranker.reRankResults(similarText, question) : []; return context.adapters.openai.completions.createCompletion( + DEFAULT_SYSTEM_MESSAGE, question, model, rerankedText, From b28670be4e59450af3adcf94c88ef9de6470c9fd Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:09:44 +0100 Subject: [PATCH 10/59] chore: llm-query-output handler --- src/handlers/comment-created-callback.ts | 21 +++------------------ src/handlers/llm-query-output.ts | 22 ++++++++++++++++++++++ 2 files changed, 25 insertions(+), 18 deletions(-) create mode 100644 src/handlers/llm-query-output.ts diff --git a/src/handlers/comment-created-callback.ts b/src/handlers/comment-created-callback.ts index b366f70..d017780 100644 --- a/src/handlers/comment-created-callback.ts +++ b/src/handlers/comment-created-callback.ts @@ -1,8 +1,7 @@ import { Context, SupportedEvents } from "../types"; -import { addCommentToIssue } from "./add-comment"; -import { askQuestion } from "./ask-llm"; import { CallbackResult } from "../types/proxy"; -import { bubbleUpErrorComment } from "../helpers/errors"; +import { askQuestion } from "./ask-llm"; +import { handleLlmQueryOutput } from "./llm-query-output"; export async function issueCommentCreatedCallback( context: Context<"issue_comment.created", SupportedEvents["issue_comment.created"]> @@ -23,19 +22,5 @@ export async function issueCommentCreatedCallback( return { status: 204, reason: logger.info("Comment is empty. Skipping.").logMessage.raw }; } logger.info(`Asking question: ${question}`); - - try { - const response = await askQuestion(context, question); - const { answer, tokenUsage } = response; - if (!answer) { - throw logger.error(`No answer from OpenAI`); - } - logger.info(`Answer: ${answer}`, { tokenUsage }); - const tokens = `\n\n`; - const commentToPost = answer + tokens; - await addCommentToIssue(context, commentToPost); - return { status: 200, reason: logger.info("Comment posted successfully").logMessage.raw }; - } catch (error) { - throw await bubbleUpErrorComment(context, error, false); - } + return await handleLlmQueryOutput(context, await askQuestion(context, question)); } diff --git a/src/handlers/llm-query-output.ts b/src/handlers/llm-query-output.ts new file mode 100644 index 0000000..be0b8f6 --- /dev/null +++ b/src/handlers/llm-query-output.ts @@ -0,0 +1,22 @@ +import { ResponseFromLlm } from "../adapters/openai/helpers/completions"; +import { bubbleUpErrorComment } from "../helpers/errors"; +import { Context } from "../types"; +import { CallbackResult } from "../types/proxy"; +import { addCommentToIssue } from "./add-comment"; + +export async function handleLlmQueryOutput(context: Context, llmResponse: ResponseFromLlm): Promise { + const { logger } = context; + try { + const { answer, tokenUsage } = llmResponse; + if (!answer) { + throw logger.error(`No answer from OpenAI`); + } + logger.info(`Answer: ${answer}`, { tokenUsage }); + const tokens = `\n\n`; + const commentToPost = answer + tokens; + await addCommentToIssue(context, commentToPost); + return { status: 200, reason: logger.info("Comment posted successfully").logMessage.raw }; + } catch (error) { + throw await bubbleUpErrorComment(context, error, false); + } +} From 2d265344ba20c608b9f96a6cb4ac1815e93d740a Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:10:45 +0100 Subject: [PATCH 11/59] chore: formatting --- src/helpers/gql-functions.ts | 42 
++++++++++++++++++------------------ src/helpers/gql-queries.ts | 15 ++++++------- src/helpers/pull-requests.ts | 34 ++++++++++++++--------------- tests/main.test.ts | 4 ++-- 4 files changed, 47 insertions(+), 48 deletions(-) diff --git a/src/helpers/gql-functions.ts b/src/helpers/gql-functions.ts index 59572de..ea91770 100644 --- a/src/helpers/gql-functions.ts +++ b/src/helpers/gql-functions.ts @@ -2,28 +2,28 @@ import { Octokit } from "@octokit/rest"; import { closedByPullRequestsReferences, IssueLinkedToPr } from "./gql-queries"; export async function collectIssuesToBeClosedByThisPr( - octokit: Octokit, - issue: { - owner: string; - repo: string; - issue_number: number; - } + octokit: Octokit, + issue: { + owner: string; + repo: string; + issue_number: number; + } ) { - const { owner, repo, issue_number } = issue; + const { owner, repo, issue_number } = issue; - if (!issue_number) { - throw new Error("[collectIssuesToBeClosedByThisPr]: issue_number is required"); - } - try { - const result = await octokit.graphql(closedByPullRequestsReferences, { - owner, - repo, - issue_number, - }); + if (!issue_number) { + throw new Error("[collectIssuesToBeClosedByThisPr]: issue_number is required"); + } + try { + const result = await octokit.graphql(closedByPullRequestsReferences, { + owner, + repo, + issue_number, + }); - return result.repository.issue.closedByPullRequestsReferences.edges.map((edge) => edge.node); - } catch { - // probably not found/deleted - return []; - } + return result.repository.issue.closedByPullRequestsReferences.edges.map((edge) => edge.node); + } catch { + // probably not found/deleted + return []; + } } diff --git a/src/helpers/gql-queries.ts b/src/helpers/gql-queries.ts index 9c5e89d..1179196 100644 --- a/src/helpers/gql-queries.ts +++ b/src/helpers/gql-queries.ts @@ -1,20 +1,19 @@ import { User, PullRequest } from "@octokit/graphql-schema"; type ClosedByPullRequestsReferences = { - node: Pick & Pick; + node: Pick & Pick; }; export type IssueLinkedToPr = { - repository: { - issue: { - closedByPullRequestsReferences: { - edges: ClosedByPullRequestsReferences[]; - }; - }; + repository: { + issue: { + closedByPullRequestsReferences: { + edges: ClosedByPullRequestsReferences[]; + }; }; + }; }; - export const closedByPullRequestsReferences = /* GraphQL */ ` query collectLinkedPullRequests($owner: String!, $repo: String!, $issue_number: Int!) 
{ repository(owner: $owner, name: $repo) { diff --git a/src/helpers/pull-requests.ts b/src/helpers/pull-requests.ts index b26e728..86217fc 100644 --- a/src/helpers/pull-requests.ts +++ b/src/helpers/pull-requests.ts @@ -2,23 +2,23 @@ import { Context } from "../types"; import { CodeReviewStatus } from "../types/pull-requests"; export async function submitCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">, review: string, status: CodeReviewStatus) { - const { logger, payload } = context - const { number, sender, organization, repository, action } = payload - const { owner, name } = repository + const { logger, payload } = context; + const { number, sender, organization, repository, action } = payload; + const { owner, name } = repository; - logger.info(`${organization}/${repository}#${number} - ${action} - ${sender.login} - ${review}`); + logger.info(`${organization}/${repository}#${number} - ${action} - ${sender.login} - ${review}`); - try { - const response = await context.octokit.pulls.createReview({ - owner: owner.login, - repo: name, - pull_number: number, - body: review, - event: status - }); + try { + const response = await context.octokit.pulls.createReview({ + owner: owner.login, + repo: name, + pull_number: number, + body: review, + event: status, + }); - logger.info(`Code review submitted: ${response.data.html_url}`); - } catch (er) { - throw logger.error("Failed to submit code review", { err: er }); - } -} \ No newline at end of file + logger.info(`Code review submitted: ${response.data.html_url}`); + } catch (er) { + throw logger.error("Failed to submit code review", { err: er }); + } +} diff --git a/tests/main.test.ts b/tests/main.test.ts index 935a113..fd25aa7 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -11,7 +11,7 @@ import { askQuestion } from "../src/handlers/ask-llm"; import { runPlugin } from "../src/plugin"; import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; import { envSchema } from "../src/types/env"; -import { CompletionsType } from "../src/adapters/openai/helpers/completions"; +import { ResponseFromLlm } from "../src/adapters/openai/helpers/completions"; const TEST_QUESTION = "what is pi?"; const TEST_SLASH_COMMAND = "@UbiquityOS what is pi?"; @@ -393,7 +393,7 @@ function createContext(body = TEST_SLASH_COMMAND) { }, openai: { completions: { - createCompletion: async (): Promise => { + createCompletion: async (): Promise => { return { answer: "This is a mock answer for the chat", tokenUsage: { From 2c0325c687586e3c60770763189e39feb9318a17 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 02:11:30 +0100 Subject: [PATCH 12/59] feat: basis for pull precheck --- src/handlers/pull-precheck.ts | 58 ++++++++++++++++++++++++++++++++--- 1 file changed, 53 insertions(+), 5 deletions(-) diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index 8e5a880..ba39e1a 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -1,8 +1,11 @@ +import { PULL_PRECHECK_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; +import { collectIssuesToBeClosedByThisPr } from "../helpers/gql-functions"; +import { fetchPullRequestDiff } from "../helpers/issue-fetching"; import { Context, SupportedEvents } from "../types"; import { CallbackResult } from "../types/proxy"; +import { handleLlmQueryOutput } from "./llm-query-output"; /** - Contributor must open as draft first then ready it for review. 
Context is: issue spec and PR diff output: what's missing compared to the spec, review as requested changes and convert to draft. Pass = commented status. @@ -13,20 +16,65 @@ conditions: export async function performPullPrecheck( context: Context<"pull_request.opened" | "pull_request.ready_for_review", SupportedEvents["pull_request.opened" | "pull_request.ready_for_review"]> ): Promise { - const { logger, payload, eventName } = context - const { pull_request } = payload + const { + logger, + payload, + config: { model }, + env: { UBIQUITY_OS_APP_NAME }, + } = context; + const { + pull_request, + repository: { + owner: { login: repoOwner }, + name: repoName, + }, + } = payload; if (pull_request.draft) { return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; } - // fetch the Task spec + const closingIssues = await collectIssuesToBeClosedByThisPr(context.octokit, { + owner: pull_request.base.repo.owner.login, + repo: pull_request.base.repo.name, + issue_number: pull_request.number, + }); + if (closingIssues.length === 0) { + throw logger.error("This pull request does not have an linked task, please link one before merging."); + } + if (closingIssues.length > 1) { + // May require some sort of elegant handling + } + const taskSpec = closingIssues[0].body; + if (!taskSpec) { + throw logger.error("Task Spec not found, please link one before merging."); + } + const prDiff = await fetchPullRequestDiff(context, repoOwner, repoName, pull_request.number); + if (!prDiff) { + throw logger.error("PR Diff not found"); + } + const question = "What's missing compared to the spec?"; + const additionalContext: string[] = [prDiff, taskSpec]; + const localContext: string[] = []; + /** + * These should be dynamic on every query + */ + const groundTruths: string[] = []; + const llmResponse = await context.adapters.openai.completions.createCompletion( + PULL_PRECHECK_SYSTEM_MESSAGE, + question, + model, + additionalContext, + localContext, + groundTruths, + UBIQUITY_OS_APP_NAME + ); - return { status: 200, reason: "success" }; + return handleLlmQueryOutput(context, llmResponse); } From e6586a48570c173a7d797c9ebc6c16c884633727 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 04:25:37 +0100 Subject: [PATCH 13/59] feat: dynamic ground truths --- src/handlers/find-ground-truths.ts | 86 ++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 src/handlers/find-ground-truths.ts diff --git a/src/handlers/find-ground-truths.ts b/src/handlers/find-ground-truths.ts new file mode 100644 index 0000000..c861463 --- /dev/null +++ b/src/handlers/find-ground-truths.ts @@ -0,0 +1,86 @@ +import OpenAI from "openai"; +import { Context } from "../types"; +import { logger } from "../helpers/errors"; + +const FIND_GROUND_TRUTHS_SYSTEM_MESSAGE = `Using the input provided, your goal is to produce an array of strings that represent "Ground Truths." + These ground truths are high-level abstractions that encapsulate the key aspects of the task. + They serve to guide and inform our code review model's interpretation of the task by providing clear, concise, and explicit insights. + + Each ground truth should: + - Be succinct and easy to understand. + - Directly pertain to the task at hand. + - Focus on essential requirements, behaviors, or assumptions involved in the task. + + Example: + Task: Implement a function that adds two numbers. + Ground Truths: + - The function should accept two numerical inputs. 
+ - The function should return the sum of the two inputs. + - Inputs must be validated to ensure they are numbers. + + Based on the given task, generate similar ground truths adhering to a maximum of 10. + + Return a JSON parsable array of strings representing the ground truths, without comment or directive.`; + +function validateGroundTruths(truthsString: string): string[] { + let truths; + try { + truths = JSON.parse(truthsString); + } catch (err) { + throw logger.error("Failed to parse ground truths"); + } + if (!Array.isArray(truths)) { + throw logger.error("Ground truths must be an array"); + } + + if (truths.length > 10) { + throw logger.error("Ground truths must not exceed 10"); + } + + truths.forEach((truth: string) => { + if (typeof truth !== "string") { + throw logger.error("Each ground truth must be a string"); + } + }); + + return truths; +} + +export async function findGroundTruths(context: Context, groundTruthSource: string) { + const { + env: { OPENAI_API_KEY }, + config: { openAiBaseUrl, model }, + } = context; + + const openAi = new OpenAI({ + apiKey: OPENAI_API_KEY, + ...(openAiBaseUrl && { baseURL: openAiBaseUrl }), + }); + + const res = await openAi.chat.completions.create({ + messages: [ + { + role: "system", + content: FIND_GROUND_TRUTHS_SYSTEM_MESSAGE, + }, + { + role: "user", + content: groundTruthSource, + }, + ], + /** + * I've used the config model here but in my opinion, + * we should optimize this for a quicker response which + * means no advanced reasoning models. rfc + */ + model: model, + }); + + const output = res.choices[0].message.content; + + if (!output) { + throw logger.error("Failed to produce a ground truths response"); + } + + return validateGroundTruths(output); +} From 42934ff77ce33953b9c83671093f19dd4eff3871 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 04:29:44 +0100 Subject: [PATCH 14/59] chore: get issue no from payload util --- src/handlers/add-comment.ts | 4 ++- src/helpers/get-issue-no-from-payload.ts | 34 ++++++++++++++++++++++++ src/helpers/issue-fetching.ts | 19 ++++++++----- 3 files changed, 49 insertions(+), 8 deletions(-) create mode 100644 src/helpers/get-issue-no-from-payload.ts diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index ec4a731..3291644 100644 --- a/src/handlers/add-comment.ts +++ b/src/handlers/add-comment.ts @@ -1,3 +1,4 @@ +import { getIssueNumberFromPayload } from "../helpers/get-issue-no-from-payload"; import { Context } from "../types/context"; /** @@ -7,7 +8,8 @@ import { Context } from "../types/context"; */ export async function addCommentToIssue(context: Context, message: string) { const { payload } = context; - const issueNumber = payload.issue.number; + const issueNumber = getIssueNumberFromPayload(payload); + try { await context.octokit.issues.createComment({ owner: payload.repository.owner.login, diff --git a/src/helpers/get-issue-no-from-payload.ts b/src/helpers/get-issue-no-from-payload.ts new file mode 100644 index 0000000..c4f7976 --- /dev/null +++ b/src/helpers/get-issue-no-from-payload.ts @@ -0,0 +1,34 @@ +import { Context } from "../types"; +import { FetchParams } from "../types/github-types"; +import { logger } from "./errors"; + +export function getIssueNumberFromPayload(payload: Context["payload"], fetchParams?: FetchParams): number { + let issueNumber, owner, repo; + + if (!issueNumber) { + if ("issue" in payload) { + issueNumber = payload.issue.number; + } + + if (!issueNumber && "pull_request" in payload) { + 
issueNumber = payload.pull_request.number; + } + } + + // takes precedence and overrides the payload + if (fetchParams) { + owner = fetchParams.owner; + repo = fetchParams.repo; + issueNumber = fetchParams.issueNum; + } + + if (!issueNumber) { + throw logger.error(`Error fetching issue`, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNumber, + }); + } + + return issueNumber; +} \ No newline at end of file diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 744e74e..881c709 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -4,6 +4,7 @@ import { IssueWithUser, SimplifiedComment, User } from "../types/github-types"; import { FetchParams, Issue, Comments, LinkedIssues } from "../types/github-types"; import { StreamlinedComment } from "../types/llm"; import { logger } from "./errors"; +import { getIssueNumberFromPayload } from "./get-issue-no-from-payload"; import { dedupeStreamlinedComments, fetchCodeLinkedFromIssue, @@ -196,12 +197,14 @@ export async function fetchPullRequestDiff(context: Context, org: string, repo: */ export async function fetchIssue(params: FetchParams): Promise { const { octokit, payload, logger } = params.context; - const { issueNum, owner, repo } = params; + const { owner, repo } = params; + const issueNumber = getIssueNumberFromPayload(payload, params); + try { const response = await octokit.rest.issues.get({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); return response.data as IssueWithUser; } catch (error) { @@ -209,7 +212,7 @@ export async function fetchIssue(params: FetchParams): Promise { error: error as Error, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); return null; } @@ -223,22 +226,24 @@ export async function fetchIssue(params: FetchParams): Promise { */ export async function fetchIssueComments(params: FetchParams) { const { octokit, payload, logger } = params.context; - const { issueNum, owner, repo } = params; + const { owner, repo } = params; const issue = await fetchIssue(params); + const issueNumber = getIssueNumberFromPayload(payload, params); + let comments: Comments = []; try { if (issue?.pull_request) { const response = await octokit.rest.pulls.listReviewComments({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - pull_number: issueNum || payload.issue.number, + pull_number: issueNumber, }); comments = response.data; } else { const response = await octokit.rest.issues.listComments({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); comments = response.data; } @@ -247,7 +252,7 @@ export async function fetchIssueComments(params: FetchParams) { e, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); comments = []; } From 63ab3e407ce23555f7236ea0b045ca3c12e35312 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 04:30:46 +0100 Subject: [PATCH 15/59] chore: precheck handler complete --- src/handlers/pull-precheck.ts | 72 +++++++++++++++++++++++++++++------ 1 
file changed, 60 insertions(+), 12 deletions(-) diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index ba39e1a..6db6f24 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -1,8 +1,9 @@ import { PULL_PRECHECK_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; -import { collectIssuesToBeClosedByThisPr } from "../helpers/gql-functions"; -import { fetchPullRequestDiff } from "../helpers/issue-fetching"; +import { checkIfPrClosesIssues } from "../helpers/gql-functions"; +import { fetchIssue, fetchPullRequestDiff } from "../helpers/issue-fetching"; import { Context, SupportedEvents } from "../types"; import { CallbackResult } from "../types/proxy"; +import { findGroundTruths } from "./find-ground-truths"; import { handleLlmQueryOutput } from "./llm-query-output"; /** @@ -34,26 +35,57 @@ export async function performPullPrecheck( return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; } - const closingIssues = await collectIssuesToBeClosedByThisPr(context.octokit, { + const { issues: closingIssues } = await checkIfPrClosesIssues(context.octokit, { owner: pull_request.base.repo.owner.login, repo: pull_request.base.repo.name, - issue_number: pull_request.number, + pr_number: pull_request.number, }); + let taskSpec; + let owner, repo, issueNumber; + if (closingIssues.length === 0) { - throw logger.error("This pull request does not have an linked task, please link one before merging."); - } + const linkedViaBodyHash = pull_request.body?.match(/#(\d+)/g); + const urlMatch = getOwnerRepoIssueNumberFromUrl(pull_request.body); + + if (linkedViaBodyHash?.length) { + const issueNumber = linkedViaBodyHash[0].replace("#", ""); + const issue = await fetchIssue({ context, owner: repoOwner, repo: repoName, issueNum: Number(issueNumber) }); + if (!issue) { + throw logger.error("This pull request does not have an linked task, please link one before merging."); + } - if (closingIssues.length > 1) { - // May require some sort of elegant handling + taskSpec = issue.body; + } + + if (urlMatch && !taskSpec) { + owner = urlMatch.owner; + repo = urlMatch.repo; + issueNumber = urlMatch.issueNumber; + const issue = await fetchIssue({ context, owner, repo, issueNum: Number(issueNumber) }); + if (!issue) { + throw logger.error("This pull request does not have an linked task, please link one before merging."); + } + + taskSpec = issue.body; + } + } else if (closingIssues.length > 1) { + throw logger.error("Multiple tasks linked to this PR, needs investigated to see how best to handle it.", { + closingIssues, + pull_request, + }); + } else { + taskSpec = closingIssues[0].body; } - const taskSpec = closingIssues[0].body; if (!taskSpec) { throw logger.error("Task Spec not found, please link one before merging."); } - const prDiff = await fetchPullRequestDiff(context, repoOwner, repoName, pull_request.number); + const tempOwner = "ubiquity-os-marketplace"; + const tempRepo = "command-ask"; + const tempIssueNumber = 11; + const prDiff = await fetchPullRequestDiff(context, tempOwner, tempRepo, tempIssueNumber); if (!prDiff) { throw logger.error("PR Diff not found"); } @@ -62,9 +94,9 @@ export async function performPullPrecheck( const additionalContext: string[] = [prDiff, taskSpec]; const localContext: string[] = []; /** - * These should be dynamic on every query + * These should be dynamic on every query imo not just here. 
*/ - const groundTruths: string[] = []; + const groundTruths: string[] = await findGroundTruths(context, taskSpec); const llmResponse = await context.adapters.openai.completions.createCompletion( PULL_PRECHECK_SYSTEM_MESSAGE, @@ -76,5 +108,21 @@ export async function performPullPrecheck( UBIQUITY_OS_APP_NAME ); + console.log("llmResponse", llmResponse); + return handleLlmQueryOutput(context, llmResponse); } + +function getOwnerRepoIssueNumberFromUrl(body: string | undefined | null): { owner: string; repo: string; issueNumber: string } | null { + if (!body) return null; + + const regex = /https:\/\/(www\.)?github.com\/(?[\w-]+)\/(?[\w-]+)\/issues\/(?\d+)/i; + const match = body.match(regex); + + if (match && match.groups) { + const { owner, repo, issueNumber } = match.groups; + return { owner, repo, issueNumber }; + } + + return null; +} From 638acbfbfd08c8278fc983f700ebd22c4aa66a6c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 04:32:42 +0100 Subject: [PATCH 16/59] chore: gql updates, format, target: ESNEXT for regex groups --- src/adapters/openai/helpers/prompts.ts | 2 +- src/helpers/gql-functions.ts | 50 +++++++++++++++++++------- src/helpers/gql-queries.ts | 25 +++++++------ src/main.ts | 2 -- tsconfig.json | 4 +-- 5 files changed, 52 insertions(+), 31 deletions(-) diff --git a/src/adapters/openai/helpers/prompts.ts b/src/adapters/openai/helpers/prompts.ts index d40d733..3110adc 100644 --- a/src/adapters/openai/helpers/prompts.ts +++ b/src/adapters/openai/helpers/prompts.ts @@ -29,4 +29,4 @@ export const DEFAULT_SYSTEM_MESSAGE = `You are tasked with assisting as a GitHub - Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query. 
- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.`; -export const PULL_PRECHECK_SYSTEM_MESSAGE = `Perform code review using the diff and spec.` \ No newline at end of file +export const PULL_PRECHECK_SYSTEM_MESSAGE = `Perform code review using the diff and spec.`; diff --git a/src/helpers/gql-functions.ts b/src/helpers/gql-functions.ts index ea91770..d97aa98 100644 --- a/src/helpers/gql-functions.ts +++ b/src/helpers/gql-functions.ts @@ -1,29 +1,53 @@ import { Octokit } from "@octokit/rest"; -import { closedByPullRequestsReferences, IssueLinkedToPr } from "./gql-queries"; +import { closedByPullRequestsReferences, IssuesClosedByThisPr } from "./gql-queries"; -export async function collectIssuesToBeClosedByThisPr( +export async function checkIfPrClosesIssues( octokit: Octokit, - issue: { + pr: { owner: string; repo: string; - issue_number: number; + pr_number: number; } ) { - const { owner, repo, issue_number } = issue; + const { owner, repo, pr_number } = pr; - if (!issue_number) { - throw new Error("[collectIssuesToBeClosedByThisPr]: issue_number is required"); + if (!pr_number) { + throw new Error("[checkIfPrClosesIssues]: pr_number is required"); } try { - const result = await octokit.graphql(closedByPullRequestsReferences, { + const result = await octokit.graphql(closedByPullRequestsReferences, { owner, repo, - issue_number, + pr_number, }); - return result.repository.issue.closedByPullRequestsReferences.edges.map((edge) => edge.node); - } catch { - // probably not found/deleted - return []; + const closingIssues = result.repository.issue.closedByPullRequestsReferences.edges.map((edge) => ({ + number: edge.node.number, + title: edge.node.title, + url: edge.node.url, + body: edge.node.body, + repository: { + name: edge.node.name, + owner: edge.node.owner, + }, + })); + + if (closingIssues.length > 0) { + return { + closesIssues: true, + issues: closingIssues, + }; + } else { + return { + closesIssues: false, + issues: [], + }; + } + } catch (error) { + console.error("Error fetching closing issues:", error); + return { + closesIssues: false, + issues: [], + }; } } diff --git a/src/helpers/gql-queries.ts b/src/helpers/gql-queries.ts index 1179196..e87851c 100644 --- a/src/helpers/gql-queries.ts +++ b/src/helpers/gql-queries.ts @@ -1,10 +1,10 @@ -import { User, PullRequest } from "@octokit/graphql-schema"; +import { User, PullRequest, Repository } from "@octokit/graphql-schema"; type ClosedByPullRequestsReferences = { - node: Pick & Pick; + node: Pick & { owner: Pick; name: Pick }; }; -export type IssueLinkedToPr = { +export type IssuesClosedByThisPr = { repository: { issue: { closedByPullRequestsReferences: { @@ -15,21 +15,20 @@ export type IssueLinkedToPr = { }; export const closedByPullRequestsReferences = /* GraphQL */ ` - query collectLinkedPullRequests($owner: String!, $repo: String!, $issue_number: Int!) { + query closingIssuesReferencesQuery($owner: String!, $repo: String!, $pr_number: Int!) { repository(owner: $owner, name: $repo) { - issue(number: $issue_number) { - closedByPullRequestsReferences(first: 100, includeClosedPrs: true) { + pullRequest(number: $pr_number) { + closingIssuesReferences(first: 100) { edges { node { - url + number title + url body - state - number - author { - login - ... 
on User { - id: databaseId + repository { + name + owner { + login } } } diff --git a/src/main.ts b/src/main.ts index ecdede3..62e2875 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,6 +1,4 @@ -// @ts-expect-error - no types found import * as core from "@actions/core"; -// @ts-expect-error - no types found import * as github from "@actions/github"; import { Value } from "@sinclair/typebox/value"; import { envSchema } from "./types/env"; diff --git a/tsconfig.json b/tsconfig.json index ba9d6b3..add1e99 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -9,8 +9,8 @@ // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ /* Language and Environment */ - "target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + "lib": ["ESNext"] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, // "jsx": "preserve", /* Specify what JSX code is generated. */ // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ From 00534fa36ac52355e987d4255d176f06b1b6e86d Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 04:40:56 +0100 Subject: [PATCH 17/59] chore: eslint, remove console.log, type fix --- src/handlers/pull-precheck.ts | 2 -- src/helpers/get-issue-no-from-payload.ts | 46 ++++++++++++------------ src/helpers/gql-functions.ts | 2 +- src/helpers/gql-queries.ts | 4 +-- 4 files changed, 26 insertions(+), 28 deletions(-) diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index 6db6f24..87f2e05 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -108,8 +108,6 @@ export async function performPullPrecheck( UBIQUITY_OS_APP_NAME ); - console.log("llmResponse", llmResponse); - return handleLlmQueryOutput(context, llmResponse); } diff --git a/src/helpers/get-issue-no-from-payload.ts b/src/helpers/get-issue-no-from-payload.ts index c4f7976..006289f 100644 --- a/src/helpers/get-issue-no-from-payload.ts +++ b/src/helpers/get-issue-no-from-payload.ts @@ -3,32 +3,32 @@ import { FetchParams } from "../types/github-types"; import { logger } from "./errors"; export function getIssueNumberFromPayload(payload: Context["payload"], fetchParams?: FetchParams): number { - let issueNumber, owner, repo; + let issueNumber, owner, repo; - if (!issueNumber) { - if ("issue" in payload) { - issueNumber = payload.issue.number; - } - - if (!issueNumber && "pull_request" in payload) { - issueNumber = payload.pull_request.number; - } + if (!issueNumber) { + if ("issue" in payload) { + issueNumber = payload.issue.number; } - // takes precedence and overrides the payload - if (fetchParams) { - owner = fetchParams.owner; - repo = fetchParams.repo; - issueNumber = fetchParams.issueNum; + if (!issueNumber && "pull_request" in payload) { + issueNumber = payload.pull_request.number; } + } - if (!issueNumber) { - throw logger.error(`Error fetching 
issue`, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNumber, - }); - } + // takes precedence and overrides the payload + if (fetchParams) { + owner = fetchParams.owner; + repo = fetchParams.repo; + issueNumber = fetchParams.issueNum; + } + + if (!issueNumber) { + throw logger.error(`Error fetching issue`, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNumber, + }); + } - return issueNumber; -} \ No newline at end of file + return issueNumber; +} diff --git a/src/helpers/gql-functions.ts b/src/helpers/gql-functions.ts index d97aa98..d555412 100644 --- a/src/helpers/gql-functions.ts +++ b/src/helpers/gql-functions.ts @@ -21,7 +21,7 @@ export async function checkIfPrClosesIssues( pr_number, }); - const closingIssues = result.repository.issue.closedByPullRequestsReferences.edges.map((edge) => ({ + const closingIssues = result.repository.pullRequest.closingIssuesReferences.edges.map((edge) => ({ number: edge.node.number, title: edge.node.title, url: edge.node.url, diff --git a/src/helpers/gql-queries.ts b/src/helpers/gql-queries.ts index e87851c..c21113a 100644 --- a/src/helpers/gql-queries.ts +++ b/src/helpers/gql-queries.ts @@ -6,8 +6,8 @@ type ClosedByPullRequestsReferences = { export type IssuesClosedByThisPr = { repository: { - issue: { - closedByPullRequestsReferences: { + pullRequest: { + closingIssuesReferences: { edges: ClosedByPullRequestsReferences[]; }; }; From 18f467fe14b578243742a1623f8452a78ed420b1 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 04:48:46 +0100 Subject: [PATCH 18/59] chore: fix tests, type context for comment.created fns --- src/handlers/ask-llm.ts | 3 ++- src/helpers/format-chat-history.ts | 2 +- tests/main.test.ts | 12 ++++++------ 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index 7e6a891..cccf7f2 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -14,7 +14,7 @@ import { DEFAULT_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; * @returns The response from GPT * @throws If no question is provided */ -export async function askQuestion(context: Context, question: string) { +export async function askQuestion(context: Context<"issue_comment.created">, question: string) { if (!question) { throw context.logger.error("No question provided"); } @@ -22,6 +22,7 @@ export async function askQuestion(context: Context, question: string) { context, owner: context.payload.repository.owner.login, repo: context.payload.repository.name, + issueNum: context.payload.issue.number, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); context.logger.info(`${formattedChat.join("")}`); diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index b1b5ecb..aa9f13d 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -13,7 +13,7 @@ import { splitKey } from "./issue"; * @returns A promise that resolves to a formatted string representing the chat history. 
*/ export async function formatChatHistory( - context: Context, + context: Context<"issue_comment.created">, streamlined: Record, specAndBodies: Record ): Promise { diff --git a/tests/main.test.ts b/tests/main.test.ts index fd25aa7..91de76e 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -258,13 +258,13 @@ function createContext(body = TEST_SLASH_COMMAND) { const user = db.users.findFirst({ where: { id: { equals: 1 } } }); return { payload: { - issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["issue"], + issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context<"issue_comment.created">["payload"]["issue"], sender: user, - repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["repository"], - comment: { body, user: user } as unknown as Context["payload"]["comment"], + repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context<"issue_comment.created">["payload"]["repository"], + comment: { body, user: user } as unknown as Context<"issue_comment.created">["payload"]["comment"], action: "created" as string, - installation: { id: 1 } as unknown as Context["payload"]["installation"], - organization: { login: "ubiquity" } as unknown as Context["payload"]["organization"], + installation: { id: 1 } as unknown as Context<"issue_comment.created">["payload"]["installation"], + organization: { login: "ubiquity" } as unknown as Context<"issue_comment.created">["payload"]["organization"], }, owner: "ubiquity", repo: "test-repo", @@ -408,5 +408,5 @@ function createContext(body = TEST_SLASH_COMMAND) { }, octokit: new octokit.Octokit(), eventName: "issue_comment.created" as SupportedEventsU, - } as unknown as Context; + } as unknown as Context<"issue_comment.created">; } From e24add89f69b67765ffe33264643a44f1a948b95 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:38:08 +0100 Subject: [PATCH 19/59] chore: pass single object param --- src/adapters/openai/helpers/completions.ts | 26 ++++++++++++++-------- src/handlers/ask-llm.ts | 16 ++++++------- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 25768bc..5a0f1e2 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -39,15 +39,23 @@ export class Completions extends SuperOpenAi { return parts.join("\n"); } - async createCompletion( - systemMessage: string, - prompt: string, - model: string = "o1-mini", - additionalContext: string[], - localContext: string[], - groundTruths: string[], - botName: string - ): Promise { + async createCompletion({ + systemMessage, + prompt, + model = "o1-mini", + additionalContext, + localContext, + groundTruths, + botName, + }: { + systemMessage: string; + prompt: string; + model: string; + additionalContext: string[]; + localContext: string[]; + groundTruths: string[]; + botName: string; + }): Promise { const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ model: model, messages: [ diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index cccf7f2..9328cec 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -64,13 +64,13 @@ export async function askGpt(context: Context, question: string, formattedChat: // const reRankedChat = formattedChat.length > 0 ? 
await context.adapters.voyage.reranker.reRankResults(formattedChat.filter(text => text !== ""), question, 300) : []; similarText = similarText.filter((text) => text !== ""); const rerankedText = similarText.length > 0 ? await context.adapters.voyage.reranker.reRankResults(similarText, question) : []; - return context.adapters.openai.completions.createCompletion( - DEFAULT_SYSTEM_MESSAGE, - question, + return context.adapters.openai.completions.createCompletion({ + systemMessage: DEFAULT_SYSTEM_MESSAGE, + prompt: question, model, - rerankedText, - formattedChat, - ["typescript", "github", "cloudflare worker", "actions", "jest", "supabase", "openai"], - UBIQUITY_OS_APP_NAME - ); + additionalContext: rerankedText, + localContext: formattedChat, + groundTruths: ["typescript", "github", "cloudflare worker", "actions", "jest", "supabase", "openai"], + botName: UBIQUITY_OS_APP_NAME, + }); } From 788403d6ceb639c11d04e8b13b2c3f325e83b868 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:38:49 +0100 Subject: [PATCH 20/59] chore: cleanup pull-precheck handler --- src/handlers/pull-precheck.ts | 138 +++++++++++----------------------- 1 file changed, 43 insertions(+), 95 deletions(-) diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index 87f2e05..90110ed 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -1,10 +1,31 @@ import { PULL_PRECHECK_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; -import { checkIfPrClosesIssues } from "../helpers/gql-functions"; -import { fetchIssue, fetchPullRequestDiff } from "../helpers/issue-fetching"; +import { fetchPullRequestDiff } from "../helpers/issue-fetching"; +import { getTaskSpecFromPullRequest } from "../helpers/pull-helpers/get-task-spec"; +import { hasCollaboratorConvertedPr } from "../helpers/pull-helpers/has-collaborator-converted"; +import { canPerformReview } from "../helpers/pull-requests"; import { Context, SupportedEvents } from "../types"; import { CallbackResult } from "../types/proxy"; import { findGroundTruths } from "./find-ground-truths"; -import { handleLlmQueryOutput } from "./llm-query-output"; +// import { handleLlmQueryOutput } from "./llm-query-output"; + +export async function performPullPrecheck( + context: Context<"pull_request.opened" | "pull_request.ready_for_review", SupportedEvents["pull_request.opened" | "pull_request.ready_for_review"]> +): Promise { + const { logger, payload } = context; + const { pull_request } = payload; + + if (pull_request.draft) { + return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; + } else if (pull_request.state === "closed") { + return { status: 200, reason: logger.info("PR is closed, no action required").logMessage.raw }; + } else if (!(await canPerformReview(context))) { + return { status: 200, reason: logger.info("Cannot perform review at this time").logMessage.raw }; + } else if (await hasCollaboratorConvertedPr(context)) { + return { status: 200, reason: logger.info("Collaborator has converted the PR, no action required").logMessage.raw }; + } + + return await handleCodeReview(context); +} /** Contributor must open as draft first then ready it for review. 
@@ -14,113 +35,40 @@ conditions: - collaborator converts the PR, bot should not interact again - one review per day */ -export async function performPullPrecheck( - context: Context<"pull_request.opened" | "pull_request.ready_for_review", SupportedEvents["pull_request.opened" | "pull_request.ready_for_review"]> -): Promise { +export async function handleCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">): Promise { const { logger, payload, config: { model }, env: { UBIQUITY_OS_APP_NAME }, } = context; - const { - pull_request, + let { repository: { owner: { login: repoOwner }, name: repoName, }, } = payload; + const taskSpec = await getTaskSpecFromPullRequest(context, repoOwner, repoName); - if (pull_request.draft) { - return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; - } - - const { issues: closingIssues } = await checkIfPrClosesIssues(context.octokit, { - owner: pull_request.base.repo.owner.login, - repo: pull_request.base.repo.name, - pr_number: pull_request.number, - }); - - let taskSpec; - let owner, repo, issueNumber; - - if (closingIssues.length === 0) { - const linkedViaBodyHash = pull_request.body?.match(/#(\d+)/g); - const urlMatch = getOwnerRepoIssueNumberFromUrl(pull_request.body); - - if (linkedViaBodyHash?.length) { - const issueNumber = linkedViaBodyHash[0].replace("#", ""); - const issue = await fetchIssue({ context, owner: repoOwner, repo: repoName, issueNum: Number(issueNumber) }); - if (!issue) { - throw logger.error("This pull request does not have an linked task, please link one before merging."); - } - - taskSpec = issue.body; - } - - if (urlMatch && !taskSpec) { - owner = urlMatch.owner; - repo = urlMatch.repo; - issueNumber = urlMatch.issueNumber; - const issue = await fetchIssue({ context, owner, repo, issueNum: Number(issueNumber) }); - if (!issue) { - throw logger.error("This pull request does not have an linked task, please link one before merging."); - } - - taskSpec = issue.body; - } - } else if (closingIssues.length > 1) { - throw logger.error("Multiple tasks linked to this PR, needs investigated to see how best to handle it.", { - closingIssues, - pull_request, - }); - } else { - taskSpec = closingIssues[0].body; - } - - if (!taskSpec) { - throw logger.error("Task Spec not found, please link one before merging."); - } - - const tempOwner = "ubiquity-os-marketplace"; - const tempRepo = "command-ask"; - const tempIssueNumber = 11; - const prDiff = await fetchPullRequestDiff(context, tempOwner, tempRepo, tempIssueNumber); + repoOwner = "ubiquity-os-marketplace"; // remove after QA + repoName = "command-ask"; // remove after QA + const prDiff = await fetchPullRequestDiff(context, repoOwner, repoName, 11 /* remove after QA*/); if (!prDiff) { throw logger.error("PR Diff not found"); } - const question = "What's missing compared to the spec?"; - const additionalContext: string[] = [prDiff, taskSpec]; - const localContext: string[] = []; - /** - * These should be dynamic on every query imo not just here. 
- */ - const groundTruths: string[] = await findGroundTruths(context, taskSpec); - - const llmResponse = await context.adapters.openai.completions.createCompletion( - PULL_PRECHECK_SYSTEM_MESSAGE, - question, + const creationOptions = { + systemMessage: PULL_PRECHECK_SYSTEM_MESSAGE, + prompt: "What's missing compared to the spec?", model, - additionalContext, - localContext, - groundTruths, - UBIQUITY_OS_APP_NAME - ); - - return handleLlmQueryOutput(context, llmResponse); -} - -function getOwnerRepoIssueNumberFromUrl(body: string | undefined | null): { owner: string; repo: string; issueNumber: string } | null { - if (!body) return null; - - const regex = /https:\/\/(www\.)?github.com\/(?[\w-]+)\/(?[\w-]+)\/issues\/(?\d+)/i; - const match = body.match(regex); - - if (match && match.groups) { - const { owner, repo, issueNumber } = match.groups; - return { owner, repo, issueNumber }; - } - - return null; + additionalContext: [prDiff, taskSpec], + localContext: [], + groundTruths: await findGroundTruths(context, taskSpec), + botName: UBIQUITY_OS_APP_NAME, + }; + + const llmResponse = await context.adapters.openai.completions.createCompletion(creationOptions); + console.log(creationOptions, llmResponse); + return { status: 200, reason: "Success" }; + // return handleLlmQueryOutput(context, llmResponse); } From 48956a8d23591e0b78df05e5efee9e9033209c53 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:45:08 +0100 Subject: [PATCH 21/59] chore: correct Logs --- package.json | 5 +- src/handlers/pull-precheck.ts | 2 +- src/helpers/errors.ts | 6 +- src/helpers/gql-queries.ts | 4 +- src/plugin.ts | 2 +- src/types/context.ts | 4 +- tests/main.test.ts | 2 +- yarn.lock | 389 +++++++++++++++++++++++++++++++++- 8 files changed, 392 insertions(+), 22 deletions(-) diff --git a/package.json b/package.json index 9ea0fba..e87e607 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,8 @@ "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", "@supabase/supabase-js": "^2.45.4", - "@ubiquity-dao/ubiquibot-logger": "^1.3.0", + "@ubiquity-os/ubiquity-os-kernel": "^2.4.0", + "@ubiquity-os/ubiquity-os-logger": "^1.3.2", "dotenv": "^16.4.5", "openai": "^4.63.0", "typebox-validators": "0.3.5", @@ -85,4 +86,4 @@ "@commitlint/config-conventional" ] } -} +} \ No newline at end of file diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index 90110ed..68c2835 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -1,8 +1,8 @@ import { PULL_PRECHECK_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; import { fetchPullRequestDiff } from "../helpers/issue-fetching"; +import { canPerformReview } from "../helpers/pull-helpers/can-perform-review"; import { getTaskSpecFromPullRequest } from "../helpers/pull-helpers/get-task-spec"; import { hasCollaboratorConvertedPr } from "../helpers/pull-helpers/has-collaborator-converted"; -import { canPerformReview } from "../helpers/pull-requests"; import { Context, SupportedEvents } from "../types"; import { CallbackResult } from "../types/proxy"; import { findGroundTruths } from "./find-ground-truths"; diff --git a/src/helpers/errors.ts b/src/helpers/errors.ts index 84da985..271f8e2 100644 --- a/src/helpers/errors.ts +++ b/src/helpers/errors.ts @@ -1,6 +1,6 @@ -import { LogReturn, Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { LogReturn, Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Context } from "../types"; -import { addCommentToIssue } from 
"../handlers/add-comment"; +// import { addCommentToIssue } from "../handlers/add-comment"; export const logger = new Logs("debug"); export function handleUncaughtError(error: unknown) { @@ -24,7 +24,7 @@ export async function bubbleUpErrorComment(context: Context, err: unknown, post } if (post) { - await addCommentToIssue(context, `${errorMessage?.logMessage.diff}\n`); + // await addCommentToIssue(context, `${errorMessage?.logMessage.diff}\n`); } return errorMessage; diff --git a/src/helpers/gql-queries.ts b/src/helpers/gql-queries.ts index c21113a..97cd4f7 100644 --- a/src/helpers/gql-queries.ts +++ b/src/helpers/gql-queries.ts @@ -1,7 +1,7 @@ -import { User, PullRequest, Repository } from "@octokit/graphql-schema"; +import { PullRequest } from "@octokit/graphql-schema"; type ClosedByPullRequestsReferences = { - node: Pick & { owner: Pick; name: Pick }; + node: Pick & { owner: string; name: string }; }; export type IssuesClosedByThisPr = { diff --git a/src/plugin.ts b/src/plugin.ts index bdfc3e8..b972acd 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,7 +1,7 @@ import { Octokit } from "@octokit/rest"; import { PluginInputs } from "./types"; import { Context } from "./types"; -import { LogLevel, Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { LogLevel, Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Env } from "./types/env"; import { createAdapters } from "./adapters"; import { createClient } from "@supabase/supabase-js"; diff --git a/src/types/context.ts b/src/types/context.ts index 6998ddd..1cde31c 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -1,11 +1,11 @@ import { Octokit } from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; import { PluginSettings } from "./plugin-inputs"; -import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Env } from "./env"; import { createAdapters } from "../adapters"; -export type SupportedEventsU = "issue_comment.created" | "pull_request.opened" | "pull_request.ready_for_review"; +export type SupportedEventsU = "issue_comment.created" | "pull_request.opened" | "pull_request.ready_for_review" | "pull_request.converted_to_draft"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; diff --git a/tests/main.test.ts b/tests/main.test.ts index 91de76e..c093a8a 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -2,7 +2,7 @@ import { db } from "./__mocks__/db"; import { server } from "./__mocks__/node"; import usersGet from "./__mocks__/users-get.json"; import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it } from "@jest/globals"; -import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Context, SupportedEventsU } from "../src/types"; import { drop } from "@mswjs/data"; import issueTemplate from "./__mocks__/issue-template"; diff --git a/yarn.lock b/yarn.lock index e149d26..a41885f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,6 +2,14 @@ # yarn lockfile v1 +"@actions/core@1.10.1": + version "1.10.1" + resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.1.tgz#61108e7ac40acae95ee36da074fa5850ca4ced8a" + integrity sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g== + dependencies: + "@actions/http-client" "^2.0.1" + uuid "^8.3.2" + "@actions/core@^1.11.1": version "1.11.1" resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.11.1.tgz#ae683aac5112438021588030efb53b1adb86f172" @@ -17,7 +25,7 @@ dependencies: "@actions/io" "^1.0.1" -"@actions/github@^6.0.0": +"@actions/github@6.0.0", "@actions/github@^6.0.0": version "6.0.0" resolved "https://registry.yarnpkg.com/@actions/github/-/github-6.0.0.tgz#65883433f9d81521b782a64cc1fd45eef2191ea7" integrity sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g== @@ -364,6 +372,11 @@ "@types/tough-cookie" "^4.0.5" tough-cookie "^4.1.4" +"@cfworker/json-schema@2.0.1": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@cfworker/json-schema/-/json-schema-2.0.1.tgz#563463393a1f19b06732491e604e0cc8255baf8a" + integrity sha512-1w7xVrTFjAWBVaOWRH5AMdKpJdltF4iy/d93E7qj8Rox6yY9OzEW1aC7T5eONrDOxXrlnsclPw9v24XW2c0mkg== + "@cloudflare/kv-asset-handler@0.3.4": version "0.3.4" resolved "https://registry.yarnpkg.com/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.3.4.tgz#5cc152847c8ae4d280ec5d7f4f6ba8c976b585c3" @@ -931,7 +944,7 @@ resolved "https://registry.yarnpkg.com/@cspell/url/-/url-8.9.0.tgz#313ccde44570b3158cb7baa3eb53e54572d7263f" integrity sha512-FaHTEx6OBVKlkX7VgAPofBZ5vIdxNWYalb0uZwJ5FCc/PCMIF5l91DQGQxRMas3qzRACR911kJamPdeK/3qilw== -"@cspotcode/source-map-support@0.8.1": +"@cspotcode/source-map-support@0.8.1", "@cspotcode/source-map-support@^0.8.0": version "0.8.1" resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== @@ -1628,11 +1641,75 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@octokit/auth-app@7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@octokit/auth-app/-/auth-app-7.1.0.tgz#55a3d3b3b3607b9d375abbe946163dca3a25c2c9" + integrity sha512-cazGaJPSgeZ8NkVYeM/C5l/6IQ5vZnsI8p1aMucadCkt/bndI+q+VqwrlnWbASRmenjOkf1t1RpCKrif53U8gw== + dependencies: + "@octokit/auth-oauth-app" "^8.1.0" + "@octokit/auth-oauth-user" "^5.1.0" + "@octokit/request" "^9.1.1" + "@octokit/request-error" "^6.1.1" + "@octokit/types" "^13.4.1" + lru-cache "^10.0.0" + universal-github-app-jwt "^2.2.0" + universal-user-agent "^7.0.0" + +"@octokit/auth-oauth-app@^8.1.0": + version "8.1.1" + resolved 
"https://registry.yarnpkg.com/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.1.tgz#6204affa6e86f535016799cadf2af9befe5e893c" + integrity sha512-5UtmxXAvU2wfcHIPPDWzVSAWXVJzG3NWsxb7zCFplCWEmMCArSZV0UQu5jw5goLQXbFyOr5onzEH37UJB3zQQg== + dependencies: + "@octokit/auth-oauth-device" "^7.0.0" + "@octokit/auth-oauth-user" "^5.0.1" + "@octokit/request" "^9.0.0" + "@octokit/types" "^13.0.0" + universal-user-agent "^7.0.0" + +"@octokit/auth-oauth-device@^7.0.0", "@octokit/auth-oauth-device@^7.0.1": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.1.tgz#7b4f8f97cbcadbe9894d48cde4406dbdef39875a" + integrity sha512-HWl8lYueHonuyjrKKIup/1tiy0xcmQCdq5ikvMO1YwkNNkxb6DXfrPjrMYItNLyCP/o2H87WuijuE+SlBTT8eg== + dependencies: + "@octokit/oauth-methods" "^5.0.0" + "@octokit/request" "^9.0.0" + "@octokit/types" "^13.0.0" + universal-user-agent "^7.0.0" + +"@octokit/auth-oauth-user@^5.0.1", "@octokit/auth-oauth-user@^5.1.0": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.1.tgz#4f1570c6ee15bb9ddc3dcca83308dcaa159e3848" + integrity sha512-rRkMz0ErOppdvEfnemHJXgZ9vTPhBuC6yASeFaB7I2yLMd7QpjfrL1mnvRPlyKo+M6eeLxrKanXJ9Qte29SRsw== + dependencies: + "@octokit/auth-oauth-device" "^7.0.1" + "@octokit/oauth-methods" "^5.0.0" + "@octokit/request" "^9.0.1" + "@octokit/types" "^13.0.0" + universal-user-agent "^7.0.0" + "@octokit/auth-token@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7" integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA== +"@octokit/auth-token@^5.0.0": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-5.1.1.tgz#3bbfe905111332a17f72d80bd0b51a3e2fa2cf07" + integrity sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA== + +"@octokit/core@6.1.2", "@octokit/core@^6.1.2": + version "6.1.2" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-6.1.2.tgz#20442d0a97c411612da206411e356014d1d1bd17" + integrity sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg== + dependencies: + "@octokit/auth-token" "^5.0.0" + "@octokit/graphql" "^8.0.0" + "@octokit/request" "^9.0.0" + "@octokit/request-error" "^6.0.1" + "@octokit/types" "^13.0.0" + before-after-hook "^3.0.2" + universal-user-agent "^7.0.0" + "@octokit/core@^5.0.1", "@octokit/core@^5.0.2": version "5.2.0" resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea" @@ -1646,6 +1723,14 @@ before-after-hook "^2.2.0" universal-user-agent "^6.0.0" +"@octokit/endpoint@^10.0.0": + version "10.1.1" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-10.1.1.tgz#1a9694e7aef6aa9d854dc78dd062945945869bcc" + integrity sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q== + dependencies: + "@octokit/types" "^13.0.0" + universal-user-agent "^7.0.2" + "@octokit/endpoint@^9.0.1": version "9.0.5" resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.5.tgz#e6c0ee684e307614c02fc6ac12274c50da465c44" @@ -1671,6 +1756,30 @@ "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" +"@octokit/graphql@^8.0.0": + version "8.1.1" + resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-8.1.1.tgz#3cacab5f2e55d91c733e3bf481d3a3f8a5f639c4" + integrity 
sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg== + dependencies: + "@octokit/request" "^9.0.0" + "@octokit/types" "^13.0.0" + universal-user-agent "^7.0.0" + +"@octokit/oauth-authorization-url@^7.0.0": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz#0e17c2225eb66b58ec902d02b6f1315ffe9ff04b" + integrity sha512-ooXV8GBSabSWyhLUowlMIVd9l1s2nsOGQdlP2SQ4LnkEsGXzeCvbSbCPdZThXhEFzleGPwbapT0Sb+YhXRyjCA== + +"@octokit/oauth-methods@^5.0.0": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@octokit/oauth-methods/-/oauth-methods-5.1.2.tgz#fd31d2a69f4c91d1abc1ed1814dda5252c697e02" + integrity sha512-C5lglRD+sBlbrhCUTxgJAFjWgJlmTx5bQ7Ch0+2uqRjYv7Cfb5xpX4WuSC9UgQna3sqRGBL9EImX9PvTpMaQ7g== + dependencies: + "@octokit/oauth-authorization-url" "^7.0.0" + "@octokit/request" "^9.1.0" + "@octokit/request-error" "^6.1.0" + "@octokit/types" "^13.0.0" + "@octokit/openapi-types@^20.0.0": version "20.0.0" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-20.0.0.tgz#9ec2daa0090eeb865ee147636e0c00f73790c6e5" @@ -1686,6 +1795,11 @@ resolved "https://registry.yarnpkg.com/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.2.1.tgz#08b974f1e83a75c4d3ce23f798c7667b433bf4cd" integrity sha512-msAU1oTSm0ZmvAE0xDemuF4tVs5i0xNnNGtNmr4EuATi+1Rn8cZDetj6NXioSf5LwnxEc209COa/WOSbjuhLUA== +"@octokit/openapi-webhooks-types@8.3.0": + version "8.3.0" + resolved "https://registry.yarnpkg.com/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.3.0.tgz#a7a4da00c0f27f7f5708eb3fcebefa08f8d51125" + integrity sha512-vKLsoR4xQxg4Z+6rU/F65ItTUz/EXbD+j/d4mlq2GW8TsA4Tc8Kdma2JTAAJ5hrKWUQzkR/Esn2fjsqiVRYaQg== + "@octokit/plugin-paginate-rest@11.3.1": version "11.3.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.1.tgz#fe92d04b49f134165d6fbb716e765c2f313ad364" @@ -1693,6 +1807,20 @@ dependencies: "@octokit/types" "^13.5.0" +"@octokit/plugin-paginate-rest@11.3.3": + version "11.3.3" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.3.tgz#efc97ba66aae6797e2807a082f99b9cfc0e05aba" + integrity sha512-o4WRoOJZlKqEEgj+i9CpcmnByvtzoUYC6I8PD2SA95M+BJ2x8h7oLcVOg9qcowWXBOdcTRsMZiwvM3EyLm9AfA== + dependencies: + "@octokit/types" "^13.5.0" + +"@octokit/plugin-paginate-rest@^11.0.0": + version "11.3.5" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.5.tgz#a1929b3ba3dc7b63bc73bb6d3c7a3faf2a9c7649" + integrity sha512-cgwIRtKrpwhLoBi0CUNuY83DPGRMaWVjqVI/bGKsLJ4PzyWZNaEmhHroI2xlrVXkk6nFv0IsZpOp+ZWSWUS2AQ== + dependencies: + "@octokit/types" "^13.6.0" + "@octokit/plugin-paginate-rest@^9.0.0": version "9.2.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz#2e2a2f0f52c9a4b1da1a3aa17dabe3c459b9e401" @@ -1705,6 +1833,11 @@ resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz#98a3ca96e0b107380664708111864cb96551f958" integrity sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA== +"@octokit/plugin-request-log@^5.3.1": + version "5.3.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-5.3.1.tgz#ccb75d9705de769b2aa82bcd105cc96eb0c00f69" + integrity sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw== + 
"@octokit/plugin-rest-endpoint-methods@13.2.2": version "13.2.2" resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.2.tgz#af8e5dd2cddfea576f92ffaf9cb84659f302a638" @@ -1712,6 +1845,13 @@ dependencies: "@octokit/types" "^13.5.0" +"@octokit/plugin-rest-endpoint-methods@13.2.4": + version "13.2.4" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.4.tgz#543add032d3fe3f5d2839bfd619cf66d85469f01" + integrity sha512-gusyAVgTrPiuXOdfqOySMDztQHv6928PQ3E4dqVGEtOvRXAKRbJR4b1zQyniIT9waqaWk/UDaoJ2dyPr7Bk7Iw== + dependencies: + "@octokit/types" "^13.5.0" + "@octokit/plugin-rest-endpoint-methods@^10.0.0": version "10.4.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz#41ba478a558b9f554793075b2e20cd2ef973be17" @@ -1719,6 +1859,30 @@ dependencies: "@octokit/types" "^12.6.0" +"@octokit/plugin-rest-endpoint-methods@^13.0.0": + version "13.2.6" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.6.tgz#b9d343dbe88a6cb70cc7fa16faa98f0a29ffe654" + integrity sha512-wMsdyHMjSfKjGINkdGKki06VEkgdEldIGstIEyGX0wbYHGByOwN/KiM+hAAlUwAtPkP3gvXtVQA9L3ITdV2tVw== + dependencies: + "@octokit/types" "^13.6.1" + +"@octokit/plugin-retry@7.1.1": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-retry/-/plugin-retry-7.1.1.tgz#a84483e4afdd068dd71da81abe206a9e442c1288" + integrity sha512-G9Ue+x2odcb8E1XIPhaFBnTTIrrUDfXN05iFXiqhR+SeeeDMMILcAnysOsxUpEWcQp2e5Ft397FCXTcPkiPkLw== + dependencies: + "@octokit/request-error" "^6.0.0" + "@octokit/types" "^13.0.0" + bottleneck "^2.15.3" + +"@octokit/plugin-throttling@9.3.1": + version "9.3.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-9.3.1.tgz#5648165e1e70e861625f3a16af6c55cafe861061" + integrity sha512-Qd91H4liUBhwLB2h6jZ99bsxoQdhgPk6TdwnClPyTBSDAdviGPceViEgUwj+pcQDmB/rfAXAXK7MTochpHM3yQ== + dependencies: + "@octokit/types" "^13.0.0" + bottleneck "^2.15.3" + "@octokit/request-error@^5.1.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30" @@ -1728,6 +1892,13 @@ deprecation "^2.0.0" once "^1.4.0" +"@octokit/request-error@^6.0.0", "@octokit/request-error@^6.1.0", "@octokit/request-error@^6.1.1": + version "6.1.5" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-6.1.5.tgz#907099e341c4e6179db623a0328d678024f54653" + integrity sha512-IlBTfGX8Yn/oFPMwSfvugfncK2EwRLjzbrpifNaMY8o/HTEAFqCA1FZxjD9cWvSKBHgrIhc4CSBIzMxiLsbzFQ== + dependencies: + "@octokit/types" "^13.0.0" + "@octokit/request-error@^6.0.1": version "6.1.1" resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-6.1.1.tgz#bed1b5f52ce7fefb1077a92bf42124ff36f73f2c" @@ -1745,6 +1916,16 @@ "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" +"@octokit/request@^9.0.0", "@octokit/request@^9.0.1", "@octokit/request@^9.1.0", "@octokit/request@^9.1.1": + version "9.1.3" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-9.1.3.tgz#42b693bc06238f43af3c037ebfd35621c6457838" + integrity sha512-V+TFhu5fdF3K58rs1pGUJIDH5RZLbZm5BI+MNF+6o/ssFNT4vWlCh/tVpF3NxGtP15HUxTTMUbsG5llAuU2CZA== + dependencies: + "@octokit/endpoint" "^10.0.0" + "@octokit/request-error" "^6.0.1" + "@octokit/types" "^13.1.0" + universal-user-agent "^7.0.2" + "@octokit/rest@20.1.1": 
version "20.1.1" resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-20.1.1.tgz#ec775864f53fb42037a954b9a40d4f5275b3dc95" @@ -1755,6 +1936,16 @@ "@octokit/plugin-request-log" "^4.0.0" "@octokit/plugin-rest-endpoint-methods" "13.2.2" +"@octokit/rest@^21.0.2": + version "21.0.2" + resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-21.0.2.tgz#9b767dbc1098daea8310fd8b76bf7a97215d5972" + integrity sha512-+CiLisCoyWmYicH25y1cDfCrv41kRSvTq6pPWtRroRJzhsCZWZyCqGyI8foJT5LmScADSwRAnr/xo+eewL04wQ== + dependencies: + "@octokit/core" "^6.1.2" + "@octokit/plugin-paginate-rest" "^11.0.0" + "@octokit/plugin-request-log" "^5.3.1" + "@octokit/plugin-rest-endpoint-methods" "^13.0.0" + "@octokit/types@^12.6.0": version "12.6.0" resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.6.0.tgz#8100fb9eeedfe083aae66473bd97b15b62aedcb2" @@ -1769,11 +1960,23 @@ dependencies: "@octokit/openapi-types" "^22.2.0" +"@octokit/types@^13.4.1", "@octokit/types@^13.6.0", "@octokit/types@^13.6.1": + version "13.6.1" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.6.1.tgz#432fc6c0aaae54318e5b2d3e15c22ac97fc9b15f" + integrity sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g== + dependencies: + "@octokit/openapi-types" "^22.2.0" + "@octokit/webhooks-methods@^5.0.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@octokit/webhooks-methods/-/webhooks-methods-5.1.0.tgz#13b6c08f89902c1ab0ddf31c6eeeec9c2772cfe6" integrity sha512-yFZa3UH11VIxYnnoOYCVoJ3q4ChuSOk2IVBBQ0O3xtKX4x9bmKb/1t+Mxixv2iUhzMdOl1qeWJqEhouXXzB3rQ== +"@octokit/webhooks-types@7.5.1": + version "7.5.1" + resolved "https://registry.yarnpkg.com/@octokit/webhooks-types/-/webhooks-types-7.5.1.tgz#e05399ab6bbbef8b78eb6bfc1a2cb138ea861104" + integrity sha512-1dozxWEP8lKGbtEu7HkRbK1F/nIPuJXNfT0gd96y6d3LcHZTtRtlf8xz3nicSJfesADxJyDh+mWBOsdLkqgzYw== + "@octokit/webhooks@13.2.7": version "13.2.7" resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-13.2.7.tgz#03f89b278cd63f271eba3062f0b75ddd18a82252" @@ -1784,6 +1987,15 @@ "@octokit/webhooks-methods" "^5.0.0" aggregate-error "^5.0.0" +"@octokit/webhooks@13.3.0": + version "13.3.0" + resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-13.3.0.tgz#fd5d54d47c789c75d60a00eb04e982152d7c654a" + integrity sha512-TUkJLtI163Bz5+JK0O+zDkQpn4gKwN+BovclUvCj6pI/6RXrFqQvUMRS2M+Rt8Rv0qR3wjoMoOPmpJKeOh0nBg== + dependencies: + "@octokit/openapi-webhooks-types" "8.3.0" + "@octokit/request-error" "^6.0.1" + "@octokit/webhooks-methods" "^5.0.0" + "@open-draft/deferred-promise@^2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz#4a822d10f6f0e316be4d67b4d4f8c9a124b073bd" @@ -1812,6 +2024,11 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.32.33.tgz#823af450f6f1571a85c12e2b1f2a0b134f61920f" integrity sha512-jM50BfkKA0fwfj0uRRO6asfNfbU0oZipJIb/bL2+BUH/THjuEf2BMiqBOvKfBji5Z9t59NboZQGNfKZbdV50Iw== +"@sinclair/typebox@0.32.35": + version "0.32.35" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.32.35.tgz#41c04473509478df9895800018a3d3ae7d40fb3c" + integrity sha512-Ul3YyOTU++to8cgNkttakC0dWvpERr6RYoHO2W47DLbFvrwBDJUY31B1sImH6JZSYc4Kt4PyHtoPNu+vL2r2dA== + "@sinclair/typebox@^0.27.8": version "0.27.8" resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" @@ -1897,6 +2114,26 @@ "@supabase/realtime-js" "2.10.2" "@supabase/storage-js" "2.7.0" 
+"@tsconfig/node10@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" + integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== + "@types/babel__core@^7.1.14": version "7.20.5" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017" @@ -2171,10 +2408,37 @@ "@typescript-eslint/types" "7.13.1" eslint-visitor-keys "^3.4.3" -"@ubiquity-dao/ubiquibot-logger@^1.3.0": - version "1.3.1" - resolved "https://registry.yarnpkg.com/@ubiquity-dao/ubiquibot-logger/-/ubiquibot-logger-1.3.1.tgz#c3f45d70014dcc2551442c28101046e1c8ea6886" - integrity sha512-kDLnVP87Y3yZV6NnqIEDAOz+92IW0nIcccML2lUn93uZ5ada78vfdTPtwPJo8tkXl1Z9qMKAqqHkwBMp1Ksnag== +"@ubiquity-os/ubiquity-os-kernel@^2.4.0": + version "2.4.0" + resolved "https://registry.yarnpkg.com/@ubiquity-os/ubiquity-os-kernel/-/ubiquity-os-kernel-2.4.0.tgz#1bb74d4b02ef5ba6b0c1c01d509e3e0f58609904" + integrity sha512-KT8AwtMOHA99GoVUs43eAR2PZii9AHmY9NjOlBtvotB5tXbeEIyhjgHr0kgRncgiLJU1UFIe0QYMmpOvmXiQpg== + dependencies: + "@actions/core" "1.10.1" + "@actions/github" "6.0.0" + "@cfworker/json-schema" "2.0.1" + "@octokit/auth-app" "7.1.0" + "@octokit/core" "6.1.2" + "@octokit/plugin-paginate-rest" "11.3.3" + "@octokit/plugin-rest-endpoint-methods" "13.2.4" + "@octokit/plugin-retry" "7.1.1" + "@octokit/plugin-throttling" "9.3.1" + "@octokit/rest" "^21.0.2" + "@octokit/types" "^13.5.0" + "@octokit/webhooks" "13.3.0" + "@octokit/webhooks-types" "7.5.1" + "@sinclair/typebox" "0.32.35" + "@ubiquity-os/ubiquity-os-logger" "^1.3.2" + dotenv "16.4.5" + hono "4.4.13" + smee-client "2.0.1" + ts-node "^10.9.2" + typebox-validators "0.3.5" + yaml "2.4.5" + +"@ubiquity-os/ubiquity-os-logger@^1.3.2": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@ubiquity-os/ubiquity-os-logger/-/ubiquity-os-logger-1.3.2.tgz#4423bc0baeac5c2f73123d15fd961310521163cd" + integrity sha512-oTIzR8z4jAQmaeJp98t1bZUKE3Ws9pas0sbxt58fC37MwXclPMWrLO+a0JlhPkdJYsvpv/q/79wC2MKVhOIVXQ== JSONStream@^1.3.5: version "1.3.5" @@ -2196,11 +2460,23 @@ acorn-jsx@^5.3.2: resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== +acorn-walk@^8.1.1: + version "8.3.4" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7" + integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g== + dependencies: + acorn "^8.11.0" + 
acorn-walk@^8.2.0: version "8.3.2" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa" integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== +acorn@^8.11.0, acorn@^8.4.1: + version "8.13.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.13.0.tgz#2a30d670818ad16ddd6a35d3842dacec9e5d7ca3" + integrity sha512-8zSiw54Oxrdym50NlZ9sUusyO1Z1ZchgRLWRaK6c86XJFClyCgFKetdowBg5bKxyp/u+CDBJG4Mpp0m3HLZl9w== + acorn@^8.12.0: version "8.12.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.0.tgz#1627bfa2e058148036133b8d9b51a700663c294c" @@ -2308,6 +2584,11 @@ anymatch@^3.0.3, anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -2473,6 +2754,11 @@ before-after-hook@^2.2.0: resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== +before-after-hook@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-3.0.2.tgz#d5665a5fa8b62294a5aa0a499f933f4a1016195d" + integrity sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A== + binary-extensions@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" @@ -2483,6 +2769,11 @@ blake3-wasm@^2.1.5: resolved "https://registry.yarnpkg.com/blake3-wasm/-/blake3-wasm-2.1.5.tgz#b22dbb84bc9419ed0159caa76af4b1b132e6ba52" integrity sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g== +bottleneck@^2.15.3: + version "2.19.5" + resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91" + integrity sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw== + brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" @@ -2750,7 +3041,7 @@ combined-stream@^1.0.8: dependencies: delayed-stream "~1.0.0" -commander@^12.1.0, commander@~12.1.0: +commander@^12.0.0, commander@^12.1.0, commander@~12.1.0: version "12.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" integrity sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA== @@ -2865,6 +3156,11 @@ create-jest@^29.7.0: jest-util "^29.7.0" prompts "^2.0.1" +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" @@ -3131,6 +3427,11 @@ 
diff3@0.0.3: resolved "https://registry.yarnpkg.com/diff3/-/diff3-0.0.3.tgz#d4e5c3a4cdf4e5fe1211ab42e693fcb4321580fc" integrity sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g== +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -3145,7 +3446,7 @@ dot-prop@^5.1.0: dependencies: is-obj "^2.0.0" -dotenv@^16.4.5: +dotenv@16.4.5, dotenv@^16.4.5: version "16.4.5" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== @@ -3509,6 +3810,11 @@ events@^3.3.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== +eventsource@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-2.0.2.tgz#76dfcc02930fb2ff339520b6d290da573a9e8508" + integrity sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA== + execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" @@ -3978,6 +4284,11 @@ headers-polyfill@^4.0.2: resolved "https://registry.yarnpkg.com/headers-polyfill/-/headers-polyfill-4.0.3.tgz#922a0155de30ecc1f785bcf04be77844ca95ad07" integrity sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ== +hono@4.4.13: + version "4.4.13" + resolved "https://registry.yarnpkg.com/hono/-/hono-4.4.13.tgz#954e8f6e4bab14f3f9d7bac4eef4c56d23e7f900" + integrity sha512-c6qqenclmQ6wpXzqiElMa2jt423PVCmgBreDfC5s2lPPpGk7d0lOymd8QTzFZyYC5mSSs6imiTMPip+gLwuW/g== + hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" @@ -5020,6 +5331,11 @@ log-update@^6.0.0: strip-ansi "^7.1.0" wrap-ansi "^9.0.0" +lru-cache@^10.0.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -5048,7 +5364,7 @@ make-dir@^4.0.0: dependencies: semver "^7.5.3" -make-error@1.x: +make-error@1.x, make-error@^1.1.1: version "1.3.6" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -6027,6 +6343,15 @@ slice-ansi@^7.0.0: ansi-styles "^6.2.1" is-fullwidth-code-point "^5.0.0" +smee-client@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/smee-client/-/smee-client-2.0.1.tgz#348a644c3499cc7687fcb42fbbaeeeb3211a365d" + integrity sha512-s2+eG9vNMWQQvu8Jz+SfAiihpYsmaMtcyPnHtBuZEhaAAQOQV63xSSL9StWv2p08xKgvSC8pEZ28rXoy41FhLg== + dependencies: + commander 
"^12.0.0" + eventsource "^2.0.2" + validator "^13.11.0" + smol-toml@^1.1.4: version "1.2.1" resolved "https://registry.yarnpkg.com/smol-toml/-/smol-toml-1.2.1.tgz#6216334548763d4aac76cafff19f8914937ee13a" @@ -6374,6 +6699,25 @@ ts-jest@29.1.5: semver "^7.5.3" yargs-parser "^21.0.1" +ts-node@^10.9.2: + version "10.9.2" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" + integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + tsconfig-paths@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz#ef78e19039133446d244beac0fd6a1632e2d107c" @@ -6547,11 +6891,21 @@ unicorn-magic@^0.1.0: resolved "https://registry.yarnpkg.com/unicorn-magic/-/unicorn-magic-0.1.0.tgz#1bb9a51c823aaf9d73a8bfcd3d1a23dde94b0ce4" integrity sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ== +universal-github-app-jwt@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/universal-github-app-jwt/-/universal-github-app-jwt-2.2.0.tgz#dc6c8929e76f1996a766ba2a08fb420f73365d77" + integrity sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ== + universal-user-agent@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== +universal-user-agent@^7.0.0, universal-user-agent@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-7.0.2.tgz#52e7d0e9b3dc4df06cc33cb2b9fd79041a54827e" + integrity sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q== + universalify@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" @@ -6595,6 +6949,11 @@ uuid@^8.3.1, uuid@^8.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + v8-to-istanbul@^9.0.1: version "9.2.0" resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz#2ed7644a245cddd83d4e087b9b33b3e62dfd10ad" @@ -6612,6 +6971,11 @@ validate-npm-package-license@^3.0.1: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" +validator@^13.11.0: + version "13.12.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.12.0.tgz#7d78e76ba85504da3fee4fd1922b385914d4b35f" + integrity sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg== + vlq@^0.2.1: version "0.2.3" resolved 
"https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" @@ -6819,7 +7183,7 @@ yallist@^3.0.2: resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== -yaml@^2.4.5: +yaml@2.4.5, yaml@^2.4.5: version "2.4.5" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.4.5.tgz#60630b206dd6d84df97003d33fc1ddf6296cca5e" integrity sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg== @@ -6847,6 +7211,11 @@ yargs@^17.0.0, yargs@^17.3.1, yargs@^17.7.2: y18n "^5.0.5" yargs-parser "^21.1.1" +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" From 50eae3992631b20349d84ea5f86f6d650ffd24cb Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:45:54 +0100 Subject: [PATCH 22/59] chore: move helper --- .../submit-code-review.ts} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename src/helpers/{pull-requests.ts => pull-helpers/submit-code-review.ts} (88%) diff --git a/src/helpers/pull-requests.ts b/src/helpers/pull-helpers/submit-code-review.ts similarity index 88% rename from src/helpers/pull-requests.ts rename to src/helpers/pull-helpers/submit-code-review.ts index 86217fc..16893e9 100644 --- a/src/helpers/pull-requests.ts +++ b/src/helpers/pull-helpers/submit-code-review.ts @@ -1,5 +1,5 @@ -import { Context } from "../types"; -import { CodeReviewStatus } from "../types/pull-requests"; +import { Context } from "../../types"; +import { CodeReviewStatus } from "../../types/pull-requests"; export async function submitCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">, review: string, status: CodeReviewStatus) { const { logger, payload } = context; @@ -21,4 +21,4 @@ export async function submitCodeReview(context: Context<"pull_request.opened" | } catch (er) { throw logger.error("Failed to submit code review", { err: er }); } -} +} \ No newline at end of file From b5b418d3106749532dcd442f45d16791579c73c2 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:46:13 +0100 Subject: [PATCH 23/59] chore: owner - repo - issueNo url util --- src/helpers/get-owner-repo-issue-from-url.ts | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 src/helpers/get-owner-repo-issue-from-url.ts diff --git a/src/helpers/get-owner-repo-issue-from-url.ts b/src/helpers/get-owner-repo-issue-from-url.ts new file mode 100644 index 0000000..24e22f0 --- /dev/null +++ b/src/helpers/get-owner-repo-issue-from-url.ts @@ -0,0 +1,13 @@ +export function getOwnerRepoIssueNumberFromUrl(body: string | undefined | null): { owner: string; repo: string; issueNumber: string } | null { + if (!body) return null; + + const regex = /https:\/\/(www\.)?github.com\/(?[\w-]+)\/(?[\w-]+)\/issues\/(?\d+)/i; + const match = body.match(regex); + + if (match && match.groups) { + const { owner, repo, issueNumber } = match.groups; + return { owner, repo, issueNumber }; + } + + return null; +} From 
89228669b9fe5cbf06c28eaa4617b142ec0cd476 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:46:47 +0100 Subject: [PATCH 24/59] chore: one review per day check --- .../pull-helpers/can-perform-review.ts | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 src/helpers/pull-helpers/can-perform-review.ts diff --git a/src/helpers/pull-helpers/can-perform-review.ts b/src/helpers/pull-helpers/can-perform-review.ts new file mode 100644 index 0000000..44e2f80 --- /dev/null +++ b/src/helpers/pull-helpers/can-perform-review.ts @@ -0,0 +1,33 @@ +import { Context } from "../../types"; + +export async function canPerformReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">) { + const { logger, payload } = context; + const { number, organization, repository, action } = payload; + const { owner, name } = repository; + + logger.info(`${organization}/${repository}#${number} - ${action}`); + + const timeline = await context.octokit.issues.listEvents({ + owner: owner.login, + repo: name, + issue_number: number, + }); + + const reviews = timeline.data.filter((event) => event.event === "reviewed"); + const botReviews = reviews.filter((review) => review.actor.type === "Bot"); + + const lastReview = botReviews[botReviews.length - 1]; + const lastReviewDate = new Date(lastReview.created_at); + const now = new Date(); + + const diff = now.getTime() - lastReviewDate.getTime(); + const ONE_DAY = 24 * 60 * 60 * 1000; + + if (diff < ONE_DAY) { + throw logger.error("Only one review per day is allowed"); + } + + logger.info("One review per day check passed"); + + return true; +} From 7501862b714db12cdb48abb7f0810a8329e66293 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:47:16 +0100 Subject: [PATCH 25/59] chore: convert to draft --- .../pull-helpers/convert-pull-to-draft.ts | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 src/helpers/pull-helpers/convert-pull-to-draft.ts diff --git a/src/helpers/pull-helpers/convert-pull-to-draft.ts b/src/helpers/pull-helpers/convert-pull-to-draft.ts new file mode 100644 index 0000000..80e3aad --- /dev/null +++ b/src/helpers/pull-helpers/convert-pull-to-draft.ts @@ -0,0 +1,23 @@ +import { Context } from "../../types"; + +export async function convertPullToDraft(context: Context<"pull_request.opened" | "pull_request.ready_for_review">) { + const { logger, payload } = context; + const { number, organization, repository, action } = payload; + const { owner, name } = repository; + + logger.info(`${organization}/${repository}#${number} - ${action}`); + + try { + await context.octokit.pulls.update({ + owner: owner.login, + repo: name, + pull_number: number, + draft: true, + }); + + logger.info("Pull request converted to draft"); + } catch (er) { + throw logger.error("Failed to convert pull request to draft", { err: er }); + } +} + From b1d5d7033efcaec3d11b7dad2858753cf17bc53d Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:47:37 +0100 Subject: [PATCH 26/59] chore: context fallback for missing task spec --- .../pull-helpers/get-context-if-no-spec.ts | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 src/helpers/pull-helpers/get-context-if-no-spec.ts diff --git a/src/helpers/pull-helpers/get-context-if-no-spec.ts b/src/helpers/pull-helpers/get-context-if-no-spec.ts new file mode 100644 index 0000000..cc389a9 --- 
/dev/null +++ b/src/helpers/pull-helpers/get-context-if-no-spec.ts @@ -0,0 +1,42 @@ +import { Context } from "../../types"; +import { logger } from "../errors"; +import { formatChatHistory } from "../format-chat-history"; +import { recursivelyFetchLinkedIssues } from "../issue-fetching"; + +export async function getContextIfNoSpecFound( + context: Context<"pull_request.opened" | "pull_request.ready_for_review">, + owner: string, + repo: string, + issueNumber: number +) { + logger.info(`No spec found for PR #${issueNumber} in ${owner}/${repo}`); + const { data: prAsIssue } = await context.octokit.issues.get({ + owner, + repo, + issue_number: 11, // remove after QA + }); + const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ + context, + owner: context.payload.repository.owner.login, + repo: context.payload.repository.name, + issueNum: context.payload.pull_request.number, + }); + const formattedChat = await formatChatHistory( + { + ...context, + eventName: "issue_comment.created", + payload: { + ...context.payload, + action: "created", + issue: prAsIssue as Context<"issue_comment.created">["payload"]["issue"], + comment: { body: prAsIssue.body } as Context<"issue_comment.created">["payload"]["comment"], + sender: { login: prAsIssue.user?.login } as Context<"issue_comment.created">["payload"]["sender"], + repository: { owner: { login: owner }, name: repo } as Context<"issue_comment.created">["payload"]["repository"], + } as Context<"issue_comment.created">["payload"], + }, + streamlinedComments, + specAndBodies + ); + + return formattedChat.join(""); +} From bbefbad11552391c2e5000bd6912ba8952d83806 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 24 Oct 2024 20:48:03 +0100 Subject: [PATCH 27/59] chore: get task spec --- src/helpers/pull-helpers/get-task-spec.ts | 61 +++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 src/helpers/pull-helpers/get-task-spec.ts diff --git a/src/helpers/pull-helpers/get-task-spec.ts b/src/helpers/pull-helpers/get-task-spec.ts new file mode 100644 index 0000000..06e4618 --- /dev/null +++ b/src/helpers/pull-helpers/get-task-spec.ts @@ -0,0 +1,61 @@ +import { Context } from "../../types"; +import { getOwnerRepoIssueNumberFromUrl } from "../get-owner-repo-issue-from-url"; +import { checkIfPrClosesIssues } from "../gql-functions"; +import { fetchIssue } from "../issue-fetching"; +import { getContextIfNoSpecFound } from "./get-context-if-no-spec"; + +export async function getTaskSpecFromPullRequest( + context: Context<"pull_request.opened" | "pull_request.ready_for_review">, + repoOwner: string, + repoName: string, + fallbackToConvo: boolean = false +) { + const { + payload: { pull_request }, + logger, + } = context; + let taskSpec; + let owner, repo, issueNumber; + + const { issues: closingIssues } = await checkIfPrClosesIssues(context.octokit, { + owner: pull_request.base.repo.owner.login, + repo: pull_request.base.repo.name, + pr_number: pull_request.number, + }); + + if (closingIssues.length === 0) { + const linkedViaBodyHash = pull_request.body?.match(/#(\d+)/g); + const urlMatch = getOwnerRepoIssueNumberFromUrl(pull_request.body); + + if (linkedViaBodyHash?.length) { + const issueNumber = linkedViaBodyHash[0].replace("#", ""); + const issue = await fetchIssue({ context, owner: repoOwner, repo: repoName, issueNum: Number(issueNumber) }); + taskSpec = issue?.body; + } + + if (urlMatch && !taskSpec) { + owner = urlMatch.owner; + repo = urlMatch.repo; + issueNumber = 
urlMatch.issueNumber;
+      const issue = await fetchIssue({ context, owner, repo, issueNum: Number(issueNumber) });
+      taskSpec = issue?.body;
+    }
+  } else if (closingIssues.length > 1) {
+    throw logger.error("Multiple tasks are linked to this PR; needs investigating to determine how best to handle it.", {
+      closingIssues,
+      pull_request,
+    });
+  } else {
+    taskSpec = closingIssues[0].body;
+  }
+
+  if (!taskSpec && fallbackToConvo) {
+    taskSpec = await getContextIfNoSpecFound(context, repoOwner, repoName, pull_request.number);
+  }
+
+  if (!taskSpec) {
+    throw logger.error("Task spec not found", { pull_request });
+  }
+
+  return taskSpec;
+}

From baa8ade9fc49ad72535f8236f4ad51477f507091 Mon Sep 17 00:00:00 2001
From: Keyrxng <106303466+Keyrxng@users.noreply.github.com>
Date: Thu, 24 Oct 2024 20:48:22 +0100
Subject: [PATCH 28/59] chore: has collaborator converted

---
 .../has-collaborator-converted.ts | 33 +++++++++++++++++++
 1 file changed, 33 insertions(+)
 create mode 100644 src/helpers/pull-helpers/has-collaborator-converted.ts

diff --git a/src/helpers/pull-helpers/has-collaborator-converted.ts b/src/helpers/pull-helpers/has-collaborator-converted.ts
new file mode 100644
index 0000000..e2c9001
--- /dev/null
+++ b/src/helpers/pull-helpers/has-collaborator-converted.ts
@@ -0,0 +1,33 @@
+import { Context } from "../../types";
+
+export async function hasCollaboratorConvertedPr(
+  context: Context<"pull_request.opened" | "pull_request.ready_for_review" | "pull_request.converted_to_draft">
+) {
+  const { logger, payload, octokit } = context;
+  const { number, organization, repository, action, pull_request } = payload;
+  const { owner, name } = repository;
+
+  logger.info(`${organization}/${repository}#${number} - ${action}`);
+
+  const timeline = await context.octokit.issues.listEvents({
+    owner: owner.login,
+    repo: name,
+    issue_number: number,
+  });
+
+  const usersThatConvertedToDraft = timeline.data.filter((event) => event.event === "converted_to_draft").map((event) => event.actor.login);
+  const usersThatReadiedForReview = timeline.data.filter((event) => event.event === "ready_for_review").map((event) => event.actor.login);
+
+  const reviews = await octokit.pulls.listReviews({
+    owner: owner.login,
+    repo: name,
+    pull_number: number,
+  });
+
+  const reviewers = reviews.data
+    .filter((review) => review.user?.type === "User" && review.author_association === "COLLABORATOR" && review.user?.login !== pull_request.user.login)
+    .map((review) => review.user?.login)
+    .filter((login): login is string => !!login);
+
+  return reviewers?.some((reviewer) => usersThatConvertedToDraft.includes(reviewer) || usersThatReadiedForReview.includes(reviewer));
+}

From d5359e85c3b0af2a690ff9848772c792d756e414 Mon Sep 17 00:00:00 2001
From: Keyrxng <106303466+Keyrxng@users.noreply.github.com>
Date: Fri, 25 Oct 2024 00:47:43 +0100
Subject: [PATCH 29/59] chore: move hardcoded MAX_TOKENS into constants.ts

---
 src/adapters/openai/constants.ts | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 src/adapters/openai/constants.ts

diff --git a/src/adapters/openai/constants.ts b/src/adapters/openai/constants.ts
new file mode 100644
index 0000000..9016b50
--- /dev/null
+++ b/src/adapters/openai/constants.ts
@@ -0,0 +1,3 @@
+// this should probably be passed in via the config
+
+export const MAX_COMPLETION_TOKENS = 7000;

From 258c0779e903a3a0996f978529ffca5d2a2cec89 Mon Sep 17 00:00:00 2001
From: Keyrxng <106303466+Keyrxng@users.noreply.github.com>
Date: Fri, 25 Oct 2024 00:48:13 +0100
Subject: [PATCH 30/59] chore: tool handling
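Reworks Completions.createCompletion to take a single CreationParams object plus an
optional message history, and adds handleFunctionCalling so that a
convert_pull_request_to_draft tool call returned by the model is executed and its
result appended to the conversation before a follow-up completion is requested. The
hard-coded token limit now comes from MAX_COMPLETION_TOKENS.

A minimal sketch of the intended call shape, assuming a Context with the openai
adapter attached; the option values below are placeholders for illustration, not
code from this series:

    // hypothetical caller; the options mirror the CreationParams type added later in this series
    const llmResponse = await context.adapters.openai.completions.createCompletion({
      systemMessage: "You are a code reviewer...", // placeholder prompt text
      prompt: taskSpecAndDiff,                     // placeholder variable
      model: "o1-mini",
      additionalContext: [],
      localContext: [],
      groundTruths: [],
      botName: UBIQUITY_OS_APP_NAME,               // assumed to come from context.env
    });
    // tool calls are resolved internally; the return value is the
    // { answer, tokenUsage } shape produced by getAnswerAndTokenUsage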
--- src/adapters/openai/helpers/completions.ts | 176 ++++++++++++--------- 1 file changed, 105 insertions(+), 71 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 5a0f1e2..ed14bbc 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -1,16 +1,12 @@ import OpenAI from "openai"; import { Context } from "../../../types"; import { SuperOpenAi } from "./openai"; -const MAX_TOKENS = 7000; - -export interface ResponseFromLlm { - answer: string; - tokenUsage: { - input: number; - output: number; - total: number; - }; -} +import { logger } from "../../../helpers/errors"; +import { appendToConversation } from "./append-to-base-chat-history"; +import { getAnswerAndTokenUsage } from "./get-answer-and-token-usage"; +import { CreationParams, ResponseFromLlm, ToolCallResponse } from "../types"; +import { MAX_COMPLETION_TOKENS } from "../constants"; +// import { LLM_TOOLS } from "./llm-tools"; export class Completions extends SuperOpenAi { protected context: Context; @@ -20,66 +16,18 @@ export class Completions extends SuperOpenAi { this.context = context; } - private _createSystemMessage(systemMessage: string, additionalContext: string[], localContext: string[], groundTruths: string[], botName: string) { - // safer to use array join than string concatenation - const parts = [ - "You Must obey the following ground truths: [", - groundTruths.join(":"), - "]\n", - systemMessage, - "Your name is : ", - botName, - "\n", - "Primary Context: ", - additionalContext.join("\n"), - "\nLocal Context: ", - localContext.join("\n"), - ]; - - return parts.join("\n"); - } - - async createCompletion({ - systemMessage, - prompt, - model = "o1-mini", - additionalContext, - localContext, - groundTruths, - botName, - }: { - systemMessage: string; - prompt: string; - model: string; - additionalContext: string[]; - localContext: string[]; - groundTruths: string[]; - botName: string; - }): Promise { + async createCompletion(params: CreationParams, messages?: OpenAI.Chat.Completions.ChatCompletionMessageParam[]): Promise { + const { model } = params; const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ + // tools: LLM_TOOLS, might not be a good idea to have this available for the general chatbot model: model, - messages: [ - { - role: "system", - content: [ - { - type: "text", - text: this._createSystemMessage(systemMessage, additionalContext, localContext, groundTruths, botName), - }, - ], - }, - { - role: "user", - content: [ - { - type: "text", - text: prompt, - }, - ], - }, - ], + messages: messages || appendToConversation(params), temperature: 0.2, - max_tokens: MAX_TOKENS, + // This value is now deprecated in favor of max_completion_tokens, and is not compatible with o1 series models. + // max_COMPLETION_tokens: MAX_COMPLETION_TOKENS, + + /**An upper bound for the number of tokens that can be generated for a completion, including visible output tokens and reasoning tokens. 
*/ + max_completion_tokens: MAX_COMPLETION_TOKENS, top_p: 0.5, frequency_penalty: 0, presence_penalty: 0, @@ -87,10 +35,96 @@ export class Completions extends SuperOpenAi { type: "text", }, }); - const answer = res.choices[0].message; - if (answer && answer.content && res.usage) { - return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } }; + + await this.handleFunctionCalling(res, params); + + return getAnswerAndTokenUsage(res); + } + + async handleFunctionCalling(res: OpenAI.Chat.Completions.ChatCompletion, params: CreationParams) { + const { systemMessage, prompt, model, additionalContext, localContext, groundTruths, botName } = params; + if (res.choices[0].finish_reason === "function_call") { + const toolCalls = res.choices[0].message.tool_calls; + const choiceMessage = res.choices[0]["message"]; + + if (!toolCalls) { + return; + } + + const fnCallResults: ToolCallResponse[] = []; + + for (const toolCall of toolCalls) { + const { name, arguments: args } = toolCall.function; + let parsedArgs: { should_convert: boolean } = JSON.parse(args); + + if (name === "convert_pull_request_to_draft") { + try { + parsedArgs = JSON.parse(args); + } catch (er) { + throw logger.error("Error parsing args for convert_pull_request_to_draft", { + args, + er, + }); + } + let fnCallResponse; + + if (!parsedArgs.should_convert) { + fnCallResponse = { + role: "tool", + content: "pull request meets the specification, no action taken.", + tool_call_id: toolCall.id, + }; + } else { + let number; + + if ("pull_request" in this.context.payload) { + number = this.context.payload.pull_request.number; + } else if ("issue" in this.context.payload) { + number = this.context.payload.issue.number; + } + + if (!number) { + throw logger.error("No pull request or issue number found in payload"); + } + + await this.context.octokit.pulls.update({ + owner: this.context.payload.repository.owner.login, + repo: this.context.payload.repository.name, + pull_number: number, + draft: true, + }); + + fnCallResponse = { + role: "tool", + content: "pull request did not meet the specification, converted to draft.", + tool_call_id: toolCall.id, + }; + } + + fnCallResults.push({ + response: choiceMessage, + tool_call_response: { + content: fnCallResponse.content, + role: "tool", + tool_call_id: toolCall.id, + }, + }); + } + } + const newChat = appendToConversation(params, fnCallResults); + + return await this.createCompletion( + { + systemMessage, + prompt, + model, + additionalContext, + localContext, + groundTruths, + botName, + }, + newChat + ); } - return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } }; } } From 0f8ef7013180fd9de1b3d8689ec62c9c51236874 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 25 Oct 2024 00:49:26 +0100 Subject: [PATCH 31/59] chore: relocate helper fns --- .../helpers/append-to-base-chat-history.ts | 35 +++++++++++++++++++ .../openai/helpers/create-system-msg.ts | 18 ++++++++++ .../helpers/get-answer-and-token-usage.ts | 18 ++++++++++ 3 files changed, 71 insertions(+) create mode 100644 src/adapters/openai/helpers/append-to-base-chat-history.ts create mode 100644 src/adapters/openai/helpers/create-system-msg.ts create mode 100644 src/adapters/openai/helpers/get-answer-and-token-usage.ts diff --git a/src/adapters/openai/helpers/append-to-base-chat-history.ts b/src/adapters/openai/helpers/append-to-base-chat-history.ts new file mode 100644 index 0000000..472bf39 --- 
/dev/null +++ b/src/adapters/openai/helpers/append-to-base-chat-history.ts @@ -0,0 +1,35 @@ +import { createSystemMessage } from "./create-system-msg"; +import { ChatHistory, CreationParams, ToolCallResponse } from "../types"; + +export function appendToConversation(params: CreationParams, toolCallsToAppend: ToolCallResponse[] = []): ChatHistory { + const { systemMessage, prompt, additionalContext, localContext, groundTruths, botName } = params; + const baseChat: ChatHistory = [ + { + role: "system", + content: [ + { + type: "text", + text: createSystemMessage(systemMessage, additionalContext, localContext, groundTruths, botName), + }, + ], + }, + { + role: "user", + content: [ + { + type: "text", + text: prompt, + }, + ], + }, + ]; + + if (toolCallsToAppend.length > 0) { + toolCallsToAppend.forEach((toolCallResponse) => { + baseChat.push(toolCallResponse.response); + baseChat.push(toolCallResponse.tool_call_response); + }); + } + + return baseChat; +} diff --git a/src/adapters/openai/helpers/create-system-msg.ts b/src/adapters/openai/helpers/create-system-msg.ts new file mode 100644 index 0000000..94ecfca --- /dev/null +++ b/src/adapters/openai/helpers/create-system-msg.ts @@ -0,0 +1,18 @@ +export function createSystemMessage(systemMessage: string, additionalContext: string[], localContext: string[], groundTruths: string[], botName: string) { + // safer to use array join than string concatenation + const parts = [ + "You Must obey the following ground truths: [", + groundTruths.join(":"), + "]\n", + systemMessage, + "Your name is : ", + botName, + "\n", + "Primary Context: ", + additionalContext.join("\n"), + "\nLocal Context: ", + localContext.join("\n"), + ]; + + return parts.join("\n"); +} diff --git a/src/adapters/openai/helpers/get-answer-and-token-usage.ts b/src/adapters/openai/helpers/get-answer-and-token-usage.ts new file mode 100644 index 0000000..abe9310 --- /dev/null +++ b/src/adapters/openai/helpers/get-answer-and-token-usage.ts @@ -0,0 +1,18 @@ +import OpenAI from "openai"; +import { ResponseFromLlm } from "../types"; + +export function getAnswerAndTokenUsage(apiResponse: OpenAI.Chat.Completions.ChatCompletion): ResponseFromLlm { + const answer = apiResponse.choices[0].message; + if (answer && answer.content && apiResponse.usage) { + return { + answer: answer.content, + tokenUsage: { + input: apiResponse.usage.prompt_tokens, + output: apiResponse.usage.completion_tokens, + total: apiResponse.usage.total_tokens, + outputDetails: apiResponse.usage.completion_tokens_details, + }, + }; + } + return { answer: "", tokenUsage: { input: 0, output: 0, total: 0, outputDetails: { reasoning_tokens: 0 } } }; +} From 82cbb1e1eeeb7caf98e87b685beb209577f58fb4 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 25 Oct 2024 00:50:47 +0100 Subject: [PATCH 32/59] chore: use my original agent logic - untested --- src/adapters/openai/helpers/call-handler.ts | 171 ++++++++++++++++++++ src/handlers/pull-precheck.ts | 16 +- 2 files changed, 178 insertions(+), 9 deletions(-) create mode 100644 src/adapters/openai/helpers/call-handler.ts diff --git a/src/adapters/openai/helpers/call-handler.ts b/src/adapters/openai/helpers/call-handler.ts new file mode 100644 index 0000000..857ac54 --- /dev/null +++ b/src/adapters/openai/helpers/call-handler.ts @@ -0,0 +1,171 @@ +import OpenAI from "openai"; +import { LLM_FUNCTIONS, LLM_TOOLS } from "./llm-tools"; +import { Context } from "../../../types"; +import { getIssueNumberFromPayload } from 
"../../../helpers/get-issue-no-from-payload"; +import { logger } from "../../../helpers/errors"; +import { ChatHistory, ResponseFromLlm } from "../types"; +import { getAnswerAndTokenUsage } from "./get-answer-and-token-usage"; + +export async function handleChat(context: Context, chatHistory: ChatHistory) { + const response = await singleResponse(context, chatHistory); + return await handleResponse(context, response, chatHistory); +} + +async function singleResponse(context: Context, chatHistory: ChatHistory) { + const { + config: { model }, + env: { OPENAI_API_KEY }, + } = context; + const openAi = new OpenAI({ + apiKey: OPENAI_API_KEY, + }); + + return await openAi.chat.completions.create({ + messages: chatHistory, + model, + max_tokens: 7000, + temperature: 0, + tools: LLM_TOOLS, + tool_choice: "auto", + }); +} + +async function handleResponse( + context: Context, + response: OpenAI.Chat.Completions.ChatCompletion, + chatHistory: ChatHistory +): Promise { + let chainCount = 0; + let toolIndex = 0; + let funcName = response.choices[0].message.tool_calls?.[0].function?.name; + let funcParams = response.choices[0].message.tool_calls?.[0].function?.arguments; + const toolCalls = response.choices[0].message.tool_calls?.length; + + const answerAndUsage = getAnswerAndTokenUsage(response); + + if (!toolCalls) { + return { + ...answerAndUsage, + chatHistory, + }; + } + + while (toolCalls > 0) { + chainCount++; + console.log(`Chain count: ${chainCount}`); + console.log(`Response ${chainCount}: ${response.choices[0].message.content}`); + const toolCallFn = agentCommands.find((command) => command.name === funcName); + + let argObj: Record; + if (funcParams) { + argObj = JSON.parse(funcParams); + } else { + argObj = {}; + } + + try { + if (toolCallFn && toolCallFn.func) { + const issueNumber = getIssueNumberFromPayload(context.payload); + const args = toolCallFn?.expectedArgs.map((arg: string) => argObj[arg]) || []; + const result = await toolCallFn?.func(...args, { + owner: context.payload.repository.owner.login, + repo: context.payload.repository.name, + octokit: context.octokit, + pull_number: issueNumber, + }); + + chatHistory.push({ + role: "tool", + content: result, + tool_call_id: response.choices[0].message.tool_calls?.[toolIndex]?.id || "", + }); + } + } catch (err) { + console.log("===================================="); + console.log("err:", err); + console.log("===================================="); + } + toolIndex++; + + if (!response.choices[0].message.tool_calls?.[toolIndex]) { + break; + } + + funcName = response.choices[0].message.tool_calls?.[toolIndex]?.function.name; + funcParams = response.choices[0].message.tool_calls?.[toolIndex]?.function.arguments; + } + + response = await singleResponse(context, chatHistory); + + const lastResponse = getAnswerAndTokenUsage(response); + + if (!lastResponse.answer) { + throw logger.error("No response found in handleResponse", { + response, + chatHistory, + chainCount, + toolCalls, + toolIndex, + }); + } + const { + tokenUsage: { outputDetails: lastOutputDetails }, + } = lastResponse; + const { + tokenUsage: { outputDetails: firstOutputDetails }, + } = answerAndUsage; + + let totalReasoningTokens = 0; + + if (lastOutputDetails && lastOutputDetails.reasoning_tokens) { + totalReasoningTokens += lastOutputDetails.reasoning_tokens; + } + + if (firstOutputDetails && firstOutputDetails.reasoning_tokens) { + totalReasoningTokens += firstOutputDetails.reasoning_tokens; + } + + return { + answer: lastResponse.answer, + chatHistory, + tokenUsage: { + 
input: answerAndUsage.tokenUsage.input + lastResponse.tokenUsage.input, + output: answerAndUsage.tokenUsage.output + lastResponse.tokenUsage.output, + total: answerAndUsage.tokenUsage.total + lastResponse.tokenUsage.total, + outputDetails: { + reasoning_tokens: totalReasoningTokens, + }, + }, + }; +} + +function isValidTool(name: string) { + return LLM_TOOLS.some((tool) => tool.function.name === `${name}Tool`); +} + +type AgentCommand = { + name: string; + // eslint-disable-next-line @typescript-eslint/ban-types + func: Function; + expectedArgs: string[]; +}; + +/** + * Handles function calling/response chaining for our models. + */ +const agentCommands: AgentCommand[] = LLM_TOOLS.map((tool) => { + // tools should be named like: fnNameTool > fnName (convertPullToDraftTool > convertPullToDraft) + // where fnNameTool is the api consumed by the LLM and fnName is the actual function + const fnName = tool.function.name.replace("Tool", ""); + + if (!isValidTool(fnName)) { + throw new Error(`Invalid tool called: ${fnName}`); + } + + return { + name: tool.function.name, + // eslint-disable-next-line @typescript-eslint/ban-types + func: LLM_FUNCTIONS.find((fn) => fn.name === fnName) as Function, + expectedArgs: JSON.parse(JSON.stringify(tool.function.parameters?.required)) as string[], + }; +}); diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts index 68c2835..d19fe90 100644 --- a/src/handlers/pull-precheck.ts +++ b/src/handlers/pull-precheck.ts @@ -6,6 +6,7 @@ import { hasCollaboratorConvertedPr } from "../helpers/pull-helpers/has-collabor import { Context, SupportedEvents } from "../types"; import { CallbackResult } from "../types/proxy"; import { findGroundTruths } from "./find-ground-truths"; +import { handleChat } from "../adapters/openai/helpers/call-handler"; // import { handleLlmQueryOutput } from "./llm-query-output"; export async function performPullPrecheck( @@ -14,6 +15,7 @@ export async function performPullPrecheck( const { logger, payload } = context; const { pull_request } = payload; + // Check if PR is in draft mode, closed, or if we can perform a review if (pull_request.draft) { return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; } else if (pull_request.state === "closed") { @@ -27,14 +29,6 @@ export async function performPullPrecheck( return await handleCodeReview(context); } -/** -Contributor must open as draft first then ready it for review. -Context is: issue spec and PR diff -output: what's missing compared to the spec, review as requested changes and convert to draft. Pass = commented status. 
-conditions: -- collaborator converts the PR, bot should not interact again -- one review per day - */ export async function handleCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">): Promise { const { logger, @@ -67,8 +61,12 @@ export async function handleCodeReview(context: Context<"pull_request.opened" | botName: UBIQUITY_OS_APP_NAME, }; - const llmResponse = await context.adapters.openai.completions.createCompletion(creationOptions); + const llmResponse = await handleChat(context, [ + { role: "system", content: creationOptions.systemMessage }, + { role: "user", content: creationOptions.prompt }, + ]); console.log(creationOptions, llmResponse); return { status: 200, reason: "Success" }; + // const llmResponse = await context.adapters.openai.completions.createCompletion(creationOptions); // return handleLlmQueryOutput(context, llmResponse); } From e0f2d51d2be601f790a980471d75aafeab208395 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 25 Oct 2024 00:51:35 +0100 Subject: [PATCH 33/59] chore: adapters/openai/types and llm tools --- src/adapters/openai/helpers/llm-tools.ts | 25 +++++++++++++++++ src/adapters/openai/types.ts | 34 ++++++++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 src/adapters/openai/helpers/llm-tools.ts create mode 100644 src/adapters/openai/types.ts diff --git a/src/adapters/openai/helpers/llm-tools.ts b/src/adapters/openai/helpers/llm-tools.ts new file mode 100644 index 0000000..6d1ae98 --- /dev/null +++ b/src/adapters/openai/helpers/llm-tools.ts @@ -0,0 +1,25 @@ +import OpenAI from "openai"; +import { convertPullToDraft } from "../../../helpers/pull-helpers/convert-pull-to-draft"; + +export const convertPullToDraftTool: OpenAI.Chat.Completions.ChatCompletionTool = { + type: "function", + function: { + name: "convertPullToDraftTool", + description: "Convert a pull request that does not meet the spec back to draft mode.", + parameters: { + type: "object", + properties: { + should_convert: { + type: "boolean", + description: "Whether to convert the pull request to draft mode.", + }, + }, + required: ["should_convert"], + additionalProperties: false, + }, + }, +}; + +export const LLM_TOOLS = [convertPullToDraftTool]; +export const LLM_FUNCTIONS = [convertPullToDraft]; +export type ToolFunctions = typeof LLM_FUNCTIONS; diff --git a/src/adapters/openai/types.ts b/src/adapters/openai/types.ts new file mode 100644 index 0000000..1f0497d --- /dev/null +++ b/src/adapters/openai/types.ts @@ -0,0 +1,34 @@ +import OpenAI from "openai"; + +export type ChatHistory = OpenAI.Chat.Completions.ChatCompletionMessageParam[]; + +export type TokenUsage = { + input: number; + output: number; + total: number; + outputDetails?: OpenAI.Completions.CompletionUsage.CompletionTokensDetails; +}; + +export type ResponseFromLlm = { + answer: string; + tokenUsage: TokenUsage; +}; + +export type CreationParams = { + systemMessage: string; + prompt: string; + model: string; + additionalContext: string[]; + localContext: string[]; + groundTruths: string[]; + botName: string; +}; + +export type ToolCallResponse = { + response: OpenAI.Chat.Completions.ChatCompletionMessage; + tool_call_response: { + role: "tool"; + content: string; + tool_call_id: string; + }; +}; From 5852dc49be3c60c19d6760cff8004ad603edcc31 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 25 Oct 2024 00:52:58 +0100 Subject: [PATCH 34/59] chore: convertPrToDraft refactored for llm tooling 
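convertPullToDraft no longer receives the plugin Context: it now takes the tool-call
argument (should_convert) plus an explicit { owner, repo, pull_number, octokit } bag
and returns a plain string, so call-handler.ts can register it in LLM_FUNCTIONS and
feed its result straight back into the chat history as the tool response.
can-perform-review.ts also gains a guard so the one-review-per-day check passes when
the bot has not reviewed the pull request yet.

A rough sketch of the intended wiring, using placeholder variable names rather than
the verbatim call site:

    // inside the tool-call loop, after parsing the model's JSON arguments
    const result = await convertPullToDraft(parsedArgs.should_convert, {
      owner: context.payload.repository.owner.login,
      repo: context.payload.repository.name,
      pull_number: pullNumber, // hypothetical: resolved from the payload
      octokit: context.octokit,
    });
    chatHistory.push({ role: "tool", content: result, tool_call_id: toolCall.id });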
--- package.json | 2 +- .../pull-helpers/can-perform-review.ts | 47 ++++++++++--------- .../pull-helpers/convert-pull-to-draft.ts | 46 +++++++++--------- .../pull-helpers/submit-code-review.ts | 2 +- 4 files changed, 53 insertions(+), 44 deletions(-) diff --git a/package.json b/package.json index e87e607..bf2ae6f 100644 --- a/package.json +++ b/package.json @@ -86,4 +86,4 @@ "@commitlint/config-conventional" ] } -} \ No newline at end of file +} diff --git a/src/helpers/pull-helpers/can-perform-review.ts b/src/helpers/pull-helpers/can-perform-review.ts index 44e2f80..d0d38b6 100644 --- a/src/helpers/pull-helpers/can-perform-review.ts +++ b/src/helpers/pull-helpers/can-perform-review.ts @@ -1,33 +1,38 @@ import { Context } from "../../types"; export async function canPerformReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">) { - const { logger, payload } = context; - const { number, organization, repository, action } = payload; - const { owner, name } = repository; + const { logger, payload } = context; + const { number, organization, repository, action } = payload; + const { owner, name } = repository; - logger.info(`${organization}/${repository}#${number} - ${action}`); + logger.info(`${organization}/${repository}#${number} - ${action}`); - const timeline = await context.octokit.issues.listEvents({ - owner: owner.login, - repo: name, - issue_number: number, - }); + const timeline = await context.octokit.issues.listEvents({ + owner: owner.login, + repo: name, + issue_number: number, + }); - const reviews = timeline.data.filter((event) => event.event === "reviewed"); - const botReviews = reviews.filter((review) => review.actor.type === "Bot"); + const reviews = timeline.data.filter((event) => event.event === "reviewed"); + const botReviews = reviews.filter((review) => review.actor.type === "Bot"); - const lastReview = botReviews[botReviews.length - 1]; - const lastReviewDate = new Date(lastReview.created_at); - const now = new Date(); + if (!botReviews.length) { + logger.info("No bot reviews found"); + return true; + } - const diff = now.getTime() - lastReviewDate.getTime(); - const ONE_DAY = 24 * 60 * 60 * 1000; + const lastReview = botReviews[botReviews.length - 1]; + const lastReviewDate = new Date(lastReview.created_at); + const now = new Date(); - if (diff < ONE_DAY) { - throw logger.error("Only one review per day is allowed"); - } + const diff = now.getTime() - lastReviewDate.getTime(); + const ONE_DAY = 24 * 60 * 60 * 1000; - logger.info("One review per day check passed"); + if (diff < ONE_DAY) { + throw logger.error("Only one review per day is allowed"); + } - return true; + logger.info("One review per day check passed"); + + return true; } diff --git a/src/helpers/pull-helpers/convert-pull-to-draft.ts b/src/helpers/pull-helpers/convert-pull-to-draft.ts index 80e3aad..d88b209 100644 --- a/src/helpers/pull-helpers/convert-pull-to-draft.ts +++ b/src/helpers/pull-helpers/convert-pull-to-draft.ts @@ -1,23 +1,27 @@ -import { Context } from "../../types"; +import { Octokit } from "@octokit/rest"; -export async function convertPullToDraft(context: Context<"pull_request.opened" | "pull_request.ready_for_review">) { - const { logger, payload } = context; - const { number, organization, repository, action } = payload; - const { owner, name } = repository; - - logger.info(`${organization}/${repository}#${number} - ${action}`); - - try { - await context.octokit.pulls.update({ - owner: owner.login, - repo: name, - pull_number: number, - draft: true, - }); - - 
logger.info("Pull request converted to draft"); - } catch (er) { - throw logger.error("Failed to convert pull request to draft", { err: er }); - } +export async function convertPullToDraft( + shouldConvert: boolean, + params: { + owner: string; + repo: string; + pull_number: number; + octokit: Octokit; + } +) { + if (!shouldConvert) { + return `No action taken. The pull request will remain in its current state.`; + } + const { owner, repo, pull_number } = params; + try { + await params.octokit.pulls.update({ + owner, + repo, + pull_number, + draft: true, + }); + return `Successfully converted pull request to draft mode.`; + } catch (err) { + return `Failed to convert pull request to draft mode: ${JSON.stringify(err)}`; + } } - diff --git a/src/helpers/pull-helpers/submit-code-review.ts b/src/helpers/pull-helpers/submit-code-review.ts index 16893e9..8995492 100644 --- a/src/helpers/pull-helpers/submit-code-review.ts +++ b/src/helpers/pull-helpers/submit-code-review.ts @@ -21,4 +21,4 @@ export async function submitCodeReview(context: Context<"pull_request.opened" | } catch (er) { throw logger.error("Failed to submit code review", { err: er }); } -} \ No newline at end of file +} From 35e04294e388ecc887c8baf2d3b23622ac831b5c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 12:54:02 +0000 Subject: [PATCH 35/59] chore: allow null body, format --- src/handlers/ask-llm.ts | 15 ++++++--------- src/helpers/issue-fetching.ts | 8 +++----- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index f0a262c..801f8e7 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -51,21 +51,18 @@ export async function askGpt(context: Context, question: string, formattedChat: try { const [similarComments, similarIssues] = await Promise.all([ comment.findSimilarComments(question, 1 - similarityThreshold, ""), - issue.findSimilarIssues(question, 1 - similarityThreshold, "") + issue.findSimilarIssues(question, 1 - similarityThreshold, ""), ]); const similarText = [ - ...similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || [], - ...similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || [] + ...(similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || []), + ...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []), ]; - formattedChat = formattedChat.filter(text => text); + formattedChat = formattedChat.filter((text) => text); const rerankedText = similarText.length > 0 ? 
await reranker.reRankResults(similarText, question) : []; - const [languages, { dependencies, devDependencies }] = await Promise.all([ - fetchRepoLanguageStats(context), - fetchRepoDependencies(context) - ]); + const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); const groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); @@ -76,4 +73,4 @@ export async function askGpt(context: Context, question: string, formattedChat: } catch (error) { throw bubbleUpErrorComment(context, error, false); } -} \ No newline at end of file +} diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 9f96d7b..6f01340 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -41,21 +41,19 @@ export async function fetchLinkedIssues(params: FetchParams) { if (!issue) { return { streamlinedComments: {}, linkedIssues: [], specAndBodies: {}, seen: new Set() }; } - if (!issue.body || !issue.html_url) { - throw logger.error("Issue body or URL not found", { issueUrl: issue.html_url }); - } if (!params.owner || !params.repo) { throw logger.error("Owner or repo not found"); } + const issueKey = createKey(issue.html_url); const [owner, repo, issueNumber] = splitKey(issueKey); - const linkedIssues: LinkedIssues[] = [{ body: issue.body, comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; + const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; const specAndBodies: Record = {}; const seen = new Set([issueKey]); comments.push({ - body: issue.body, + body: issue.body || "", user: issue.user, id: issue.id.toString(), org: params.owner, From 228f486a02b306b6409ba1ce88e0321f0ad63620 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:12:15 +0000 Subject: [PATCH 36/59] chore: remove github-diff-tool --- package.json | 1 - yarn.lock | 412 +-------------------------------------------------- 2 files changed, 1 insertion(+), 412 deletions(-) diff --git a/package.json b/package.json index ece6959..d3a72c2 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,6 @@ "@supabase/supabase-js": "^2.45.4", "@ubiquity-os/ubiquity-os-logger": "^1.3.2", "dotenv": "^16.4.5", - "github-diff-tool": "^1.0.6", "gpt-tokenizer": "^2.5.1", "openai": "^4.63.0", "typebox-validators": "0.3.5", diff --git a/yarn.lock b/yarn.lock index ad622f5..6588153 100644 --- a/yarn.lock +++ b/yarn.lock @@ -979,11 +979,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== -"@esbuild/aix-ppc64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz#b57697945b50e99007b4c2521507dc613d4a648c" - integrity sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw== - "@esbuild/android-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd" @@ -994,11 +989,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" integrity 
sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== -"@esbuild/android-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz#1add7e0af67acefd556e407f8497e81fddad79c0" - integrity sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w== - "@esbuild/android-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d" @@ -1009,11 +999,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== -"@esbuild/android-arm@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.24.0.tgz#ab7263045fa8e090833a8e3c393b60d59a789810" - integrity sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew== - "@esbuild/android-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1" @@ -1024,11 +1009,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== -"@esbuild/android-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.24.0.tgz#e8f8b196cfdfdd5aeaebbdb0110983460440e705" - integrity sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ== - "@esbuild/darwin-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276" @@ -1039,11 +1019,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== -"@esbuild/darwin-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz#2d0d9414f2acbffd2d86e98253914fca603a53dd" - integrity sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw== - "@esbuild/darwin-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb" @@ -1054,11 +1029,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== -"@esbuild/darwin-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz#33087aab31a1eb64c89daf3d2cf8ce1775656107" - integrity sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA== - "@esbuild/freebsd-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2" @@ -1069,11 +1039,6 @@ resolved 
"https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== -"@esbuild/freebsd-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz#bb76e5ea9e97fa3c753472f19421075d3a33e8a7" - integrity sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA== - "@esbuild/freebsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4" @@ -1084,11 +1049,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== -"@esbuild/freebsd-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz#e0e2ce9249fdf6ee29e5dc3d420c7007fa579b93" - integrity sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ== - "@esbuild/linux-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb" @@ -1099,11 +1059,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== -"@esbuild/linux-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz#d1b2aa58085f73ecf45533c07c82d81235388e75" - integrity sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g== - "@esbuild/linux-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a" @@ -1114,11 +1069,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== -"@esbuild/linux-arm@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz#8e4915df8ea3e12b690a057e77a47b1d5935ef6d" - integrity sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw== - "@esbuild/linux-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a" @@ -1129,11 +1079,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== -"@esbuild/linux-ia32@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz#8200b1110666c39ab316572324b7af63d82013fb" - integrity sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA== - "@esbuild/linux-loong64@0.17.19": version "0.17.19" resolved 
"https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72" @@ -1144,11 +1089,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== -"@esbuild/linux-loong64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz#6ff0c99cf647504df321d0640f0d32e557da745c" - integrity sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g== - "@esbuild/linux-mips64el@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289" @@ -1159,11 +1099,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== -"@esbuild/linux-mips64el@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz#3f720ccd4d59bfeb4c2ce276a46b77ad380fa1f3" - integrity sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA== - "@esbuild/linux-ppc64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7" @@ -1174,11 +1109,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== -"@esbuild/linux-ppc64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz#9d6b188b15c25afd2e213474bf5f31e42e3aa09e" - integrity sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ== - "@esbuild/linux-riscv64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09" @@ -1189,11 +1119,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== -"@esbuild/linux-riscv64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz#f989fdc9752dfda286c9cd87c46248e4dfecbc25" - integrity sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw== - "@esbuild/linux-s390x@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829" @@ -1204,11 +1129,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== -"@esbuild/linux-s390x@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz#29ebf87e4132ea659c1489fce63cd8509d1c7319" - integrity 
sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g== - "@esbuild/linux-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4" @@ -1219,11 +1139,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== -"@esbuild/linux-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz#4af48c5c0479569b1f359ffbce22d15f261c0cef" - integrity sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA== - "@esbuild/netbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462" @@ -1234,16 +1149,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== -"@esbuild/netbsd-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz#1ae73d23cc044a0ebd4f198334416fb26c31366c" - integrity sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg== - -"@esbuild/openbsd-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz#5d904a4f5158c89859fd902c427f96d6a9e632e2" - integrity sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg== - "@esbuild/openbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691" @@ -1254,11 +1159,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== -"@esbuild/openbsd-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz#4c8aa88c49187c601bae2971e71c6dc5e0ad1cdf" - integrity sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q== - "@esbuild/sunos-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273" @@ -1269,11 +1169,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== -"@esbuild/sunos-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz#8ddc35a0ea38575fa44eda30a5ee01ae2fa54dd4" - integrity sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA== - "@esbuild/win32-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f" @@ -1284,11 +1179,6 @@ resolved 
"https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== -"@esbuild/win32-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz#6e79c8543f282c4539db684a207ae0e174a9007b" - integrity sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA== - "@esbuild/win32-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03" @@ -1299,11 +1189,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== -"@esbuild/win32-ia32@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz#057af345da256b7192d18b676a02e95d0fa39103" - integrity sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw== - "@esbuild/win32-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061" @@ -1314,11 +1199,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== -"@esbuild/win32-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz#168ab1c7e1c318b922637fad8f339d48b01e1244" - integrity sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA== - "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -1748,83 +1628,11 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@octokit/app@^15.0.0": - version "15.1.0" - resolved "https://registry.yarnpkg.com/@octokit/app/-/app-15.1.0.tgz#b330d8826be088ec8d1d43a59dc27ef57d1232b2" - integrity sha512-TkBr7QgOmE6ORxvIAhDbZsqPkF7RSqTY4pLTtUQCvr6dTXqvi2fFo46q3h1lxlk/sGMQjqyZ0kEahkD/NyzOHg== - dependencies: - "@octokit/auth-app" "^7.0.0" - "@octokit/auth-unauthenticated" "^6.0.0" - "@octokit/core" "^6.1.2" - "@octokit/oauth-app" "^7.0.0" - "@octokit/plugin-paginate-rest" "^11.0.0" - "@octokit/types" "^13.0.0" - "@octokit/webhooks" "^13.0.0" - -"@octokit/auth-app@^7.0.0": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-app/-/auth-app-7.1.1.tgz#d8916ad01e6ffb0a0a50507aa613e91fe7a49b93" - integrity sha512-kRAd6yelV9OgvlEJE88H0VLlQdZcag9UlLr7dV0YYP37X8PPDvhgiTy66QVhDXdyoT0AleFN2w/qXkPdrSzINg== - dependencies: - "@octokit/auth-oauth-app" "^8.1.0" - "@octokit/auth-oauth-user" "^5.1.0" - "@octokit/request" "^9.1.1" - "@octokit/request-error" "^6.1.1" - "@octokit/types" "^13.4.1" - lru-cache "^10.0.0" - universal-github-app-jwt "^2.2.0" - universal-user-agent "^7.0.0" - -"@octokit/auth-oauth-app@^8.0.0", "@octokit/auth-oauth-app@^8.1.0": - version "8.1.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.1.tgz#6204affa6e86f535016799cadf2af9befe5e893c" - integrity 
sha512-5UtmxXAvU2wfcHIPPDWzVSAWXVJzG3NWsxb7zCFplCWEmMCArSZV0UQu5jw5goLQXbFyOr5onzEH37UJB3zQQg== - dependencies: - "@octokit/auth-oauth-device" "^7.0.0" - "@octokit/auth-oauth-user" "^5.0.1" - "@octokit/request" "^9.0.0" - "@octokit/types" "^13.0.0" - universal-user-agent "^7.0.0" - -"@octokit/auth-oauth-device@^7.0.0", "@octokit/auth-oauth-device@^7.0.1": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.1.tgz#7b4f8f97cbcadbe9894d48cde4406dbdef39875a" - integrity sha512-HWl8lYueHonuyjrKKIup/1tiy0xcmQCdq5ikvMO1YwkNNkxb6DXfrPjrMYItNLyCP/o2H87WuijuE+SlBTT8eg== - dependencies: - "@octokit/oauth-methods" "^5.0.0" - "@octokit/request" "^9.0.0" - "@octokit/types" "^13.0.0" - universal-user-agent "^7.0.0" - -"@octokit/auth-oauth-user@^5.0.1", "@octokit/auth-oauth-user@^5.1.0": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.1.tgz#4f1570c6ee15bb9ddc3dcca83308dcaa159e3848" - integrity sha512-rRkMz0ErOppdvEfnemHJXgZ9vTPhBuC6yASeFaB7I2yLMd7QpjfrL1mnvRPlyKo+M6eeLxrKanXJ9Qte29SRsw== - dependencies: - "@octokit/auth-oauth-device" "^7.0.1" - "@octokit/oauth-methods" "^5.0.0" - "@octokit/request" "^9.0.1" - "@octokit/types" "^13.0.0" - universal-user-agent "^7.0.0" - "@octokit/auth-token@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7" integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA== -"@octokit/auth-token@^5.0.0": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-5.1.1.tgz#3bbfe905111332a17f72d80bd0b51a3e2fa2cf07" - integrity sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA== - -"@octokit/auth-unauthenticated@^6.0.0", "@octokit/auth-unauthenticated@^6.0.0-beta.1": - version "6.1.0" - resolved "https://registry.yarnpkg.com/@octokit/auth-unauthenticated/-/auth-unauthenticated-6.1.0.tgz#de0fe923bb06ed93aea526ab99972a98c546d0bf" - integrity sha512-zPSmfrUAcspZH/lOFQnVnvjQZsIvmfApQH6GzJrkIunDooU1Su2qt2FfMTSVPRp7WLTQyC20Kd55lF+mIYaohQ== - dependencies: - "@octokit/request-error" "^6.0.1" - "@octokit/types" "^13.0.0" - "@octokit/core@^5.0.1", "@octokit/core@^5.0.2": version "5.2.0" resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea" @@ -1838,27 +1646,6 @@ before-after-hook "^2.2.0" universal-user-agent "^6.0.0" -"@octokit/core@^6.0.0", "@octokit/core@^6.1.2": - version "6.1.2" - resolved "https://registry.yarnpkg.com/@octokit/core/-/core-6.1.2.tgz#20442d0a97c411612da206411e356014d1d1bd17" - integrity sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg== - dependencies: - "@octokit/auth-token" "^5.0.0" - "@octokit/graphql" "^8.0.0" - "@octokit/request" "^9.0.0" - "@octokit/request-error" "^6.0.1" - "@octokit/types" "^13.0.0" - before-after-hook "^3.0.2" - universal-user-agent "^7.0.0" - -"@octokit/endpoint@^10.0.0": - version "10.1.1" - resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-10.1.1.tgz#1a9694e7aef6aa9d854dc78dd062945945869bcc" - integrity sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q== - dependencies: - "@octokit/types" "^13.0.0" - universal-user-agent "^7.0.2" - "@octokit/endpoint@^9.0.1": version "9.0.5" resolved 
"https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-9.0.5.tgz#e6c0ee684e307614c02fc6ac12274c50da465c44" @@ -1876,44 +1663,6 @@ "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" -"@octokit/graphql@^8.0.0": - version "8.1.1" - resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-8.1.1.tgz#3cacab5f2e55d91c733e3bf481d3a3f8a5f639c4" - integrity sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg== - dependencies: - "@octokit/request" "^9.0.0" - "@octokit/types" "^13.0.0" - universal-user-agent "^7.0.0" - -"@octokit/oauth-app@^7.0.0": - version "7.1.3" - resolved "https://registry.yarnpkg.com/@octokit/oauth-app/-/oauth-app-7.1.3.tgz#a0f256dd185e7c00bfbc3e6bc3c5aad66e42c609" - integrity sha512-EHXbOpBkSGVVGF1W+NLMmsnSsJRkcrnVmDKt0TQYRBb6xWfWzoi9sBD4DIqZ8jGhOWO/V8t4fqFyJ4vDQDn9bg== - dependencies: - "@octokit/auth-oauth-app" "^8.0.0" - "@octokit/auth-oauth-user" "^5.0.1" - "@octokit/auth-unauthenticated" "^6.0.0-beta.1" - "@octokit/core" "^6.0.0" - "@octokit/oauth-authorization-url" "^7.0.0" - "@octokit/oauth-methods" "^5.0.0" - "@types/aws-lambda" "^8.10.83" - universal-user-agent "^7.0.0" - -"@octokit/oauth-authorization-url@^7.0.0": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz#0e17c2225eb66b58ec902d02b6f1315ffe9ff04b" - integrity sha512-ooXV8GBSabSWyhLUowlMIVd9l1s2nsOGQdlP2SQ4LnkEsGXzeCvbSbCPdZThXhEFzleGPwbapT0Sb+YhXRyjCA== - -"@octokit/oauth-methods@^5.0.0": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@octokit/oauth-methods/-/oauth-methods-5.1.2.tgz#fd31d2a69f4c91d1abc1ed1814dda5252c697e02" - integrity sha512-C5lglRD+sBlbrhCUTxgJAFjWgJlmTx5bQ7Ch0+2uqRjYv7Cfb5xpX4WuSC9UgQna3sqRGBL9EImX9PvTpMaQ7g== - dependencies: - "@octokit/oauth-authorization-url" "^7.0.0" - "@octokit/request" "^9.1.0" - "@octokit/request-error" "^6.1.0" - "@octokit/types" "^13.0.0" - "@octokit/openapi-types@^20.0.0": version "20.0.0" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-20.0.0.tgz#9ec2daa0090eeb865ee147636e0c00f73790c6e5" @@ -1929,16 +1678,6 @@ resolved "https://registry.yarnpkg.com/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.2.1.tgz#08b974f1e83a75c4d3ce23f798c7667b433bf4cd" integrity sha512-msAU1oTSm0ZmvAE0xDemuF4tVs5i0xNnNGtNmr4EuATi+1Rn8cZDetj6NXioSf5LwnxEc209COa/WOSbjuhLUA== -"@octokit/openapi-webhooks-types@8.3.0": - version "8.3.0" - resolved "https://registry.yarnpkg.com/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.3.0.tgz#a7a4da00c0f27f7f5708eb3fcebefa08f8d51125" - integrity sha512-vKLsoR4xQxg4Z+6rU/F65ItTUz/EXbD+j/d4mlq2GW8TsA4Tc8Kdma2JTAAJ5hrKWUQzkR/Esn2fjsqiVRYaQg== - -"@octokit/plugin-paginate-graphql@^5.0.0": - version "5.2.4" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-5.2.4.tgz#b6afda7b3f24cb93d2ab822ec8eac664a5d325d0" - integrity sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA== - "@octokit/plugin-paginate-rest@11.3.1": version "11.3.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.1.tgz#fe92d04b49f134165d6fbb716e765c2f313ad364" @@ -1946,13 +1685,6 @@ dependencies: "@octokit/types" "^13.5.0" -"@octokit/plugin-paginate-rest@^11.0.0": - version "11.3.5" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.5.tgz#a1929b3ba3dc7b63bc73bb6d3c7a3faf2a9c7649" - integrity 
sha512-cgwIRtKrpwhLoBi0CUNuY83DPGRMaWVjqVI/bGKsLJ4PzyWZNaEmhHroI2xlrVXkk6nFv0IsZpOp+ZWSWUS2AQ== - dependencies: - "@octokit/types" "^13.6.0" - "@octokit/plugin-paginate-rest@^9.0.0": version "9.2.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz#2e2a2f0f52c9a4b1da1a3aa17dabe3c459b9e401" @@ -1979,30 +1711,6 @@ dependencies: "@octokit/types" "^12.6.0" -"@octokit/plugin-rest-endpoint-methods@^13.0.0": - version "13.2.6" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.6.tgz#b9d343dbe88a6cb70cc7fa16faa98f0a29ffe654" - integrity sha512-wMsdyHMjSfKjGINkdGKki06VEkgdEldIGstIEyGX0wbYHGByOwN/KiM+hAAlUwAtPkP3gvXtVQA9L3ITdV2tVw== - dependencies: - "@octokit/types" "^13.6.1" - -"@octokit/plugin-retry@^7.0.0": - version "7.1.2" - resolved "https://registry.yarnpkg.com/@octokit/plugin-retry/-/plugin-retry-7.1.2.tgz#242e2d19a72a50b5113bb25d7d2c622ce0373fa0" - integrity sha512-XOWnPpH2kJ5VTwozsxGurw+svB2e61aWlmk5EVIYZPwFK5F9h4cyPyj9CIKRyMXMHSwpIsI3mPOdpMmrRhe7UQ== - dependencies: - "@octokit/request-error" "^6.0.0" - "@octokit/types" "^13.0.0" - bottleneck "^2.15.3" - -"@octokit/plugin-throttling@^9.0.0": - version "9.3.2" - resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-9.3.2.tgz#cc05180e45e769d6726c5faed157e9ad3b6ab8c0" - integrity sha512-FqpvcTpIWFpMMwIeSoypoJXysSAQ3R+ALJhXXSG1HTP3YZOIeLmcNcimKaXxTcws+Sh6yoRl13SJ5r8sXc1Fhw== - dependencies: - "@octokit/types" "^13.0.0" - bottleneck "^2.15.3" - "@octokit/request-error@^5.1.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30" @@ -2012,13 +1720,6 @@ deprecation "^2.0.0" once "^1.4.0" -"@octokit/request-error@^6.0.0", "@octokit/request-error@^6.1.0", "@octokit/request-error@^6.1.1": - version "6.1.5" - resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-6.1.5.tgz#907099e341c4e6179db623a0328d678024f54653" - integrity sha512-IlBTfGX8Yn/oFPMwSfvugfncK2EwRLjzbrpifNaMY8o/HTEAFqCA1FZxjD9cWvSKBHgrIhc4CSBIzMxiLsbzFQ== - dependencies: - "@octokit/types" "^13.0.0" - "@octokit/request-error@^6.0.1": version "6.1.1" resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-6.1.1.tgz#bed1b5f52ce7fefb1077a92bf42124ff36f73f2c" @@ -2036,16 +1737,6 @@ "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" -"@octokit/request@^9.0.0", "@octokit/request@^9.0.1", "@octokit/request@^9.1.0", "@octokit/request@^9.1.1": - version "9.1.3" - resolved "https://registry.yarnpkg.com/@octokit/request/-/request-9.1.3.tgz#42b693bc06238f43af3c037ebfd35621c6457838" - integrity sha512-V+TFhu5fdF3K58rs1pGUJIDH5RZLbZm5BI+MNF+6o/ssFNT4vWlCh/tVpF3NxGtP15HUxTTMUbsG5llAuU2CZA== - dependencies: - "@octokit/endpoint" "^10.0.0" - "@octokit/request-error" "^6.0.1" - "@octokit/types" "^13.1.0" - universal-user-agent "^7.0.2" - "@octokit/rest@20.1.1": version "20.1.1" resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-20.1.1.tgz#ec775864f53fb42037a954b9a40d4f5275b3dc95" @@ -2070,13 +1761,6 @@ dependencies: "@octokit/openapi-types" "^22.2.0" -"@octokit/types@^13.4.1", "@octokit/types@^13.6.0", "@octokit/types@^13.6.1": - version "13.6.1" - resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.6.1.tgz#432fc6c0aaae54318e5b2d3e15c22ac97fc9b15f" - integrity sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g== - dependencies: - 
"@octokit/openapi-types" "^22.2.0" - "@octokit/webhooks-methods@^5.0.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@octokit/webhooks-methods/-/webhooks-methods-5.1.0.tgz#13b6c08f89902c1ab0ddf31c6eeeec9c2772cfe6" @@ -2092,15 +1776,6 @@ "@octokit/webhooks-methods" "^5.0.0" aggregate-error "^5.0.0" -"@octokit/webhooks@^13.0.0": - version "13.3.0" - resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-13.3.0.tgz#fd5d54d47c789c75d60a00eb04e982152d7c654a" - integrity sha512-TUkJLtI163Bz5+JK0O+zDkQpn4gKwN+BovclUvCj6pI/6RXrFqQvUMRS2M+Rt8Rv0qR3wjoMoOPmpJKeOh0nBg== - dependencies: - "@octokit/openapi-webhooks-types" "8.3.0" - "@octokit/request-error" "^6.0.1" - "@octokit/webhooks-methods" "^5.0.0" - "@open-draft/deferred-promise@^2.2.0": version "2.2.0" resolved "https://registry.yarnpkg.com/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz#4a822d10f6f0e316be4d67b4d4f8c9a124b073bd" @@ -2214,11 +1889,6 @@ "@supabase/realtime-js" "2.10.2" "@supabase/storage-js" "2.7.0" -"@types/aws-lambda@^8.10.83": - version "8.10.145" - resolved "https://registry.yarnpkg.com/@types/aws-lambda/-/aws-lambda-8.10.145.tgz#b2d31a987f4888e5553ff1819f57cafa475594d9" - integrity sha512-dtByW6WiFk5W5Jfgz1VM+YPA21xMXTuSFoLYIDY0L44jDLLflVPtZkYuu3/YxpGcvjzKFBZLU+GyKjR0HOYtyw== - "@types/babel__core@^7.1.14": version "7.20.5" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017" @@ -2795,11 +2465,6 @@ before-after-hook@^2.2.0: resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== -before-after-hook@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-3.0.2.tgz#d5665a5fa8b62294a5aa0a499f933f4a1016195d" - integrity sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A== - binary-extensions@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" @@ -2810,11 +2475,6 @@ blake3-wasm@^2.1.5: resolved "https://registry.yarnpkg.com/blake3-wasm/-/blake3-wasm-2.1.5.tgz#b22dbb84bc9419ed0159caa76af4b1b132e6ba52" integrity sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g== -bottleneck@^2.15.3: - version "2.19.5" - resolved "https://registry.yarnpkg.com/bottleneck/-/bottleneck-2.19.5.tgz#5df0b90f59fd47656ebe63c78a98419205cadd91" - integrity sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw== - brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" @@ -3477,7 +3137,7 @@ dot-prop@^5.1.0: dependencies: is-obj "^2.0.0" -dotenv@^16.3.1, dotenv@^16.4.5: +dotenv@^16.4.5: version "16.4.5" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== @@ -3645,36 +3305,6 @@ esbuild@0.17.19: "@esbuild/win32-ia32" "0.17.19" "@esbuild/win32-x64" "0.17.19" -esbuild@^0.24.0: - version "0.24.0" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.24.0.tgz#f2d470596885fcb2e91c21eb3da3b3c89c0b55e7" - integrity 
sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ== - optionalDependencies: - "@esbuild/aix-ppc64" "0.24.0" - "@esbuild/android-arm" "0.24.0" - "@esbuild/android-arm64" "0.24.0" - "@esbuild/android-x64" "0.24.0" - "@esbuild/darwin-arm64" "0.24.0" - "@esbuild/darwin-x64" "0.24.0" - "@esbuild/freebsd-arm64" "0.24.0" - "@esbuild/freebsd-x64" "0.24.0" - "@esbuild/linux-arm" "0.24.0" - "@esbuild/linux-arm64" "0.24.0" - "@esbuild/linux-ia32" "0.24.0" - "@esbuild/linux-loong64" "0.24.0" - "@esbuild/linux-mips64el" "0.24.0" - "@esbuild/linux-ppc64" "0.24.0" - "@esbuild/linux-riscv64" "0.24.0" - "@esbuild/linux-s390x" "0.24.0" - "@esbuild/linux-x64" "0.24.0" - "@esbuild/netbsd-x64" "0.24.0" - "@esbuild/openbsd-arm64" "0.24.0" - "@esbuild/openbsd-x64" "0.24.0" - "@esbuild/sunos-x64" "0.24.0" - "@esbuild/win32-arm64" "0.24.0" - "@esbuild/win32-ia32" "0.24.0" - "@esbuild/win32-x64" "0.24.0" - esbuild@~0.21.4: version "0.21.5" resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" @@ -4182,15 +3812,6 @@ git-raw-commits@^4.0.0: meow "^12.0.1" split2 "^4.0.0" -github-diff-tool@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/github-diff-tool/-/github-diff-tool-1.0.6.tgz#e633b46397db850ad3dc0d500450357cb7ee26f9" - integrity sha512-DOqKck+WUj3HsfOwef5cjS32qqOkKWFncIl4erBtp2+dfccrkSi6Ee14mKGnrQaAhMrx/9LWFh8X5KGivZVY8A== - dependencies: - dotenv "^16.3.1" - esbuild "^0.24.0" - octokit "^4.0.2" - glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" @@ -5389,11 +5010,6 @@ log-update@^6.0.0: strip-ansi "^7.1.0" wrap-ansi "^9.0.0" -lru-cache@^10.0.0: - version "10.4.3" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" - integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== - lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -5716,22 +5332,6 @@ object.assign@^4.1.5: has-symbols "^1.0.3" object-keys "^1.1.1" -octokit@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/octokit/-/octokit-4.0.2.tgz#775d68d363cdaec69d7b73d3dc82ae909d30f59b" - integrity sha512-wbqF4uc1YbcldtiBFfkSnquHtECEIpYD78YUXI6ri1Im5OO2NLo6ZVpRdbJpdnpZ05zMrVPssNiEo6JQtea+Qg== - dependencies: - "@octokit/app" "^15.0.0" - "@octokit/core" "^6.0.0" - "@octokit/oauth-app" "^7.0.0" - "@octokit/plugin-paginate-graphql" "^5.0.0" - "@octokit/plugin-paginate-rest" "^11.0.0" - "@octokit/plugin-rest-endpoint-methods" "^13.0.0" - "@octokit/plugin-retry" "^7.0.0" - "@octokit/plugin-throttling" "^9.0.0" - "@octokit/request-error" "^6.0.0" - "@octokit/types" "^13.0.0" - ohash@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/ohash/-/ohash-1.1.4.tgz#ae8d83014ab81157d2c285abf7792e2995fadd72" @@ -6932,21 +6532,11 @@ unicorn-magic@^0.1.0: resolved "https://registry.yarnpkg.com/unicorn-magic/-/unicorn-magic-0.1.0.tgz#1bb9a51c823aaf9d73a8bfcd3d1a23dde94b0ce4" integrity sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ== -universal-github-app-jwt@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/universal-github-app-jwt/-/universal-github-app-jwt-2.2.0.tgz#dc6c8929e76f1996a766ba2a08fb420f73365d77" - integrity 
sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ== - universal-user-agent@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== -universal-user-agent@^7.0.0, universal-user-agent@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-7.0.2.tgz#52e7d0e9b3dc4df06cc33cb2b9fd79041a54827e" - integrity sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q== - universalify@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" From 841b9a597a8e8c71aceda9bf9f26213fad5e01ad Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:13:05 +0000 Subject: [PATCH 37/59] chore: return body hash matching, simplify diff fetch --- src/helpers/format-chat-history.ts | 192 ++++++++++++++--------------- src/helpers/issue-fetching.ts | 58 ++------- src/helpers/issue-handling.ts | 4 +- src/helpers/issue.ts | 20 ++- 4 files changed, 125 insertions(+), 149 deletions(-) diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index ecb2b38..6832355 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -1,17 +1,9 @@ import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/llm"; import { createKey, streamlineComments } from "../handlers/comments"; -import { fetchPullRequestDiff, fetchIssue, fetchIssueComments, fetchLinkedPullRequests } from "./issue-fetching"; +import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; import { splitKey } from "./issue"; -/** - * Formats the chat history by combining streamlined comments and specifications or bodies for issues and pull requests. - * - * @param context - The context object containing information about the current GitHub event. - * @param streamlined - A record of streamlined comments for each issue or pull request. - * @param specAndBodies - A record of specifications or bodies for each issue or pull request. - * @returns A promise that resolves to a formatted string representing the chat history. - */ export async function formatChatHistory( context: Context, streamlined: Record, @@ -19,60 +11,72 @@ export async function formatChatHistory( ): Promise { const keys = new Set([...Object.keys(streamlined), ...Object.keys(specAndBodies), createKey(context.payload.issue.html_url)]); let runningTokenCount = 0; + const chatHistory = await Promise.all( Array.from(keys).map(async (key) => { - const isCurrentIssue = key === createKey(context.payload.issue.html_url); - const [currentTokenCount, result] = await createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue, runningTokenCount); + const [currentTokenCount, result] = await createContextBlockSection({ + context, + key, + streamlined, + specAndBodies, + isCurrentIssue: key === createKey(context.payload.issue.html_url), + currentContextTokenCount: runningTokenCount, + }); runningTokenCount += currentTokenCount; return result; }) ); + return Array.from(new Set(chatHistory)); } -/** - * Generates the correct header string based on the provided parameters. 
- * - * @param prDiff - The pull request diff string, if available. - * @param issueNumber - The issue number. - * @param isCurrentIssue - A boolean indicating if this is the current issue. - * @param isBody - A boolean indicating if this is for the body of the issue. - * @returns The formatted header string. - */ -function getCorrectHeaderString(prDiff: string | null, issueNumber: number, isCurrentIssue: boolean, isBody: boolean) { - const headerTemplates = { - pull: `Pull #${issueNumber} Request`, - issue: `Issue #${issueNumber} Specification`, - convo: `Issue #${issueNumber} Conversation`, +function getCorrectHeaderString(prDiff: string | null, isCurrentIssue: boolean, isConvo: boolean) { + const strings = { + convo: { + pull: { + linked: `Linked Pull Request Conversation`, + current: `Current Pull Request Conversation`, + }, + issue: { + linked: `Linked Task Conversation`, + current: `Current Task Conversation`, + }, + }, + spec: { + pull: { + linked: `Linked Pull Request Specification`, + current: `Current Pull Request Specification`, + }, + issue: { + linked: `Linked Task Specification`, + current: `Current Task Specification`, + }, + }, }; - const type = prDiff ? "pull" : "issue"; - const context = isCurrentIssue ? "current" : "linked"; - const bodyContext = isBody ? "convo" : type; - - return `${context.charAt(0).toUpperCase() + context.slice(1)} ${headerTemplates[bodyContext]}`; + const category = isConvo ? "convo" : "spec"; + const issueType = prDiff ? "pull" : "issue"; + const issueStatus = isCurrentIssue ? "current" : "linked"; + return strings[category][issueType][issueStatus]; } -/** - * Creates a context block section for the given issue or pull request. - * - * @param context - The context object containing information about the current GitHub event. - * @param key - The unique key representing the issue or pull request. - * @param streamlined - A record of streamlined comments for each issue or pull request. - * @param specAndBodies - A record of specifications or bodies for each issue or pull request. - * @param isCurrentIssue - A boolean indicating whether the key represents the current issue. - * @returns A formatted string representing the context block section. 
- */ -async function createContextBlockSection( - context: Context, - key: string, - streamlined: Record, - specAndBodies: Record, - isCurrentIssue: boolean, - currentContextTokenCount: number = 0 -): Promise<[number, string]> { - const maxTokens = context.config.maxTokens; +async function createContextBlockSection({ + context, + key, + streamlined, + specAndBodies, + isCurrentIssue, + currentContextTokenCount, +}: { + context: Context; + key: string; + streamlined: Record; + specAndBodies: Record; + isCurrentIssue: boolean; + currentContextTokenCount: number; +}): Promise<[number, string]> { let comments = streamlined[key]; + if (!comments || comments.length === 0) { const [owner, repo, number] = splitKey(key); const { comments: fetchedComments } = await fetchIssueComments({ @@ -83,24 +87,15 @@ async function createContextBlockSection( }); comments = streamlineComments(fetchedComments)[key]; } + const [org, repo, issueNum] = key.split("/"); const issueNumber = parseInt(issueNum); if (!issueNumber || isNaN(issueNumber)) { throw context.logger.error("Issue number is not valid"); } - const pulls = (await fetchLinkedPullRequests(org, repo, issueNumber, context)) || []; - const prDiffs = await Promise.all(pulls.map((pull) => fetchPullRequestDiff(context, org, repo, pull.number))); - let prDiff: string | null = null; - for (const pullDiff of prDiffs.flat()) { - if (currentContextTokenCount > maxTokens) break; - if (pullDiff) { - const tokenLength = await context.adapters.openai.completions.findTokenLength(pullDiff.diff); - if (currentContextTokenCount + tokenLength > maxTokens) break; - currentContextTokenCount += tokenLength; - prDiff = (prDiff ? prDiff + "\n" : "") + pullDiff.diff; - } - } - const specHeader = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, false); + + const prDiff = await fetchPullRequestDiff(context, org, repo, issueNumber); + let specOrBody = specAndBodies[key]; if (!specOrBody) { specOrBody = @@ -113,61 +108,60 @@ async function createContextBlockSection( }) )?.body || "No specification or body available"; } - const specOrBodyBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader)]; - currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(specOrBody); - const header = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, true); - const repoString = `${org}/${repo} #${issueNumber}`; - const block = [specOrBodyBlock.join(""), createHeader(header, repoString), createComment({ issueNumber, repo, org, comments }), createFooter(header)]; - currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(block.join(" ")); + + const specHeader = getCorrectHeaderString(prDiff, isCurrentIssue, false); + const blockHeader = getCorrectHeaderString(prDiff, isCurrentIssue, true); + + const specBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader, key)]; + const commentSection = createComment({ issueNumber, repo, org, comments }, specOrBody); + + let block; + if (commentSection) { + block = [specBlock.join(""), createHeader(blockHeader, key), commentSection, specOrBody, createFooter(blockHeader, key)]; + } else { + block = [specBlock.join("")]; + } + if (!prDiff) { + currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(block.join("")); return [currentContextTokenCount, block.join("")]; } - const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), prDiff, createFooter("\nLinked Pull 
Request Code Diff")]; - return [currentContextTokenCount, block.join("") + diffBlock.join("")]; + + const blockWithDiff = [block.join(""), createHeader(`Pull Request Diff`, key), prDiff, createFooter(`Pull Request Diff`, key)]; + currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(blockWithDiff.join("")); + return [currentContextTokenCount, blockWithDiff.join("")]; } -/** - * Creates a header string for the given content and repository string. - * - * @param content - The content to include in the header. - * @param repoString - The repository string to include in the header. - * @returns A formatted header string. - */ function createHeader(content: string, repoString: string) { return `=== ${content} === ${repoString} ===\n\n`; } -/** - * Creates a footer string for the given content. - * - * @param content - The content to include in the footer. - * @returns A formatted footer string. - */ -function createFooter(content: string) { - return `=== End ${content} ===\n\n`; +function createFooter(content: string, repoString: string) { + return `=== End ${content} === ${repoString} ===\n\n`; } -/** - * Creates a comment string from the StreamlinedComments object. - * - * @param comment - The StreamlinedComments object. - * @returns A string representing the comments. - */ -function createComment(comment: StreamlinedComments) { +function createComment(comment: StreamlinedComments, specOrBody: string) { if (!comment.comments) { return ""; } - // Format comments + + const seen = new Set(); + comment.comments = comment.comments.filter((c) => { + if (seen.has(c.id) || c.body === specOrBody) { + return false; + } + seen.add(c.id); + return true; + }); + const formattedComments = comment.comments.map((c) => `${c.id} ${c.user}: ${c.body}\n`); + + if (formattedComments.length === 0) { + return; + } return formattedComments.join(""); } -/** - * Creates a formatted string for the specification or body of an issue. - * - * @param specOrBody - The specification or body content. - * @returns A formatted string representing the specification or body. - */ function createSpecOrBody(specOrBody: string) { return `${specOrBody}\n`; } diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 6f01340..9802659 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -1,4 +1,3 @@ -import { GithubDiff } from "github-diff-tool"; import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; import { IssueComments, FetchParams, Issue, LinkedIssues, LinkedPullsToIssue, ReviewComments, SimplifiedComment } from "../types/github-types"; @@ -84,7 +83,7 @@ export async function fetchLinkedIssues(params: FetchParams) { } for (const comment of comments) { - const foundIssues = idIssueFromComment(comment.body); + const foundIssues = idIssueFromComment(comment.body, params); const foundCodes = comment.body ? await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; if (foundIssues) { for (const linkedIssue of foundIssues) { @@ -149,57 +148,26 @@ export async function mergeCommentsAndFetchSpec( const merged = mergeStreamlinedComments(streamlinedComments, streamed); streamlinedComments = { ...streamlinedComments, ...merged }; } + if (linkedIssue.body) { await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); } } -/** - * Fetches the diff of a pull request. 
- * - * @param context - The context containing the octokit instance and logger. - * @param org - The organization or owner of the repository. - * @param repo - The name of the repository. - * @param issue - The pull request number. - * @returns A promise that resolves to the diff of the pull request as a string, or null if an error occurs. - */ -export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number): Promise<{ diff: string; diffSize: number }[] | null> { - const { octokit, logger } = context; +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { + const { octokit } = context; + try { - const githubDiff = new GithubDiff(octokit); - //Fetch the statistics of the pull request - const stats = await githubDiff.getPullRequestStats(org, repo, issue); - const files = stats.map((file) => ({ filename: file.filename, diffSizeInBytes: file.diffSizeInBytes })); - //Fetch the diff of the files - const prDiffs = await Promise.all( - files.map(async (file) => { - let diff = null; - try { - diff = await githubDiff.getPullRequestDiff({ - owner: org, - repo, - pullNumber: issue, - filePath: file.filename, - }); - } catch { - logger.error(`Error fetching pull request diff for the file`, { - owner: org, - repo, - pull_number: issue, - file: file.filename, - }); - } - return diff ? { diff: file.filename + diff, diffSize: file.diffSizeInBytes } : null; - }) - ); - return prDiffs.filter((diff): diff is { diff: string; diffSize: number } => diff !== null).sort((a, b) => a.diffSize - b.diffSize); - } catch (error) { - logger.error(`Error fetching pull request diff`, { - err: error, + const diff = await octokit.pulls.get({ owner: org, repo, pull_number: issue, + mediaType: { + format: "diff", + }, }); + return diff.data as unknown as string; + } catch (e) { return null; } } @@ -315,14 +283,14 @@ function castCommentsToSimplifiedComments(comments: (IssueComments | ReviewComme }; } - if ("issue_url" in comment) { + if ("html_url" in comment) { return { body: comment.body, user: comment.user, id: comment.id.toString(), org: params.owner || params.context.payload.repository.owner.login, repo: params.repo || params.context.payload.repository.name, - issueUrl: comment.issue_url, + issueUrl: comment.html_url, }; } diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 3f44225..055f1dd 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -42,7 +42,7 @@ export async function handleSpec( streamlinedComments: Record ) { specAndBodies[key] = specOrBody; - const otherReferences = idIssueFromComment(specOrBody); + const otherReferences = idIssueFromComment(specOrBody, params); if (otherReferences) { for (const ref of otherReferences) { const anotherKey = createKey(ref.url, ref.issueNumber); @@ -87,7 +87,7 @@ export async function handleComment( streamlinedComments: Record, seen: Set ) { - const otherReferences = idIssueFromComment(comment.body); + const otherReferences = idIssueFromComment(comment.body, params); if (otherReferences) { for (const ref of otherReferences) { const key = createKey(ref.url); diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index b63b7d5..a67eb8a 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -58,8 +58,8 @@ export function splitKey(key: string): [string, string, string] { * @param params - Additional parameters that may include context information. * @returns An array of linked issues or null if no issues are found. 
*/ -export function idIssueFromComment(comment?: string | null): LinkedIssues[] | null { - const urlMatch = comment?.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+)\/(pull|issues?)\/(\d+)/g); +export function idIssueFromComment(comment?: string | null, params?: FetchParams): LinkedIssues[] | null { + const urlMatch = comment?.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/g); const response: LinkedIssues[] = []; if (urlMatch) { @@ -68,6 +68,20 @@ export function idIssueFromComment(comment?: string | null): LinkedIssues[] | nu }); } + /** + * These can only reference issues within the same repository + * so params works here + */ + const hashMatch = comment?.match(/#(\d+)/g); + if (hashMatch && hashMatch.length > 0) { + hashMatch.forEach((hash) => { + const issueNumber = hash.replace("#", ""); + const owner = params?.context.payload.repository?.owner?.login || ""; + const repo = params?.context.payload.repository?.name || ""; + response.push({ body: undefined, owner, repo, issueNumber: parseInt(issueNumber), url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` }); + }); + } + return response.length > 0 ? response : null; } @@ -151,7 +165,7 @@ export async function fetchCodeLinkedFromIssue( return { body: content, id: parsedUrl.path }; } } catch (error) { - console.error(`Error fetching content from ${url}:`, error); + logger.error(`Error fetching content from ${url}:`, { er: error }); } return null; }) From dafdd09fb5302b3aa364d537ea230eb262580ae2 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:13:51 +0000 Subject: [PATCH 38/59] chore: update logs to capture full final ctx --- src/adapters/openai/helpers/completions.ts | 38 ++++++++++++++-------- src/handlers/ask-llm.ts | 7 ---- src/handlers/comment-created-callback.ts | 20 ++++++------ 3 files changed, 34 insertions(+), 31 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 3a5f24a..18e8e33 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -3,6 +3,7 @@ import { Context } from "../../../types"; import { SuperOpenAi } from "./openai"; import { CompletionsModelHelper, ModelApplications } from "../../../types/llm"; import { encode } from "gpt-tokenizer"; +import { logger } from "../../../helpers/errors"; export interface CompletionsType { answer: string; @@ -23,7 +24,7 @@ export class Completions extends SuperOpenAi { } async createCompletion( - prompt: string, + query: string, model: string = "o1-mini", additionalContext: string[], localContext: string[], @@ -31,6 +32,26 @@ export class Completions extends SuperOpenAi { botName: string, maxTokens: number ): Promise { + const numTokens = await this.findTokenLength(query, additionalContext, localContext, groundTruths); + logger.info(`Number of tokens: ${numTokens}`); + + const sysMsg = [ + "You Must obey the following ground truths: [", + groundTruths.join(":"), + "]\n", + "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. 
**Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.", + "Your name is : ", + botName, + "\n", + "Main Context (Provide additional precedence in terms of information): ", + localContext.join("\n"), + "Secondary Context: ", + additionalContext.join("\n"), + ].join("\n"); + + logger.info(`System message: ${sysMsg}`); + logger.info(`Query: ${query}`); + const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ model: model, messages: [ @@ -39,18 +60,7 @@ export class Completions extends SuperOpenAi { content: [ { type: "text", - text: - "You Must obey the following ground truths: [" + - groundTruths.join(":") + - "]\n" + - "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. 
If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." + - "Your name is : " + - botName + - "\n" + - "Main Context (Provide additional precedence in terms of information): " + - localContext.join("\n") + - "Secondary Context: " + - additionalContext.join("\n"), + text: sysMsg, }, ], }, @@ -59,7 +69,7 @@ export class Completions extends SuperOpenAi { content: [ { type: "text", - text: prompt, + text: query, }, ], }, diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index 801f8e7..b76c36f 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -25,7 +25,6 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - context.logger.info(`${formattedChat.join("")}`); return await askGpt(context, question, formattedChat); } @@ -45,7 +44,6 @@ export async function askGpt(context: Context, question: string, formattedChat: voyage: { reranker }, openai: { completions }, }, - logger, } = context; try { @@ -63,12 +61,7 @@ export async function askGpt(context: Context, question: string, formattedChat: const rerankedText = similarText.length > 0 ? await reranker.reRankResults(similarText, question) : []; const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); - const groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); - - const numTokens = await completions.findTokenLength(question, rerankedText, formattedChat, groundTruths); - logger.info(`Number of tokens: ${numTokens}`); - return completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); } catch (error) { throw bubbleUpErrorComment(context, error, false); diff --git a/src/handlers/comment-created-callback.ts b/src/handlers/comment-created-callback.ts index ae44fbe..b11c36c 100644 --- a/src/handlers/comment-created-callback.ts +++ b/src/handlers/comment-created-callback.ts @@ -27,24 +27,24 @@ export async function issueCommentCreatedCallback( return { status: 204, reason: logger.info("Comment is from a bot. 
Skipping.").logMessage.raw }; } - logger.info(`Asking question: ${question}`); - let commentToPost; try { const response = await askQuestion(context, question); const { answer, tokenUsage, groundTruths } = response; if (!answer) { throw logger.error(`No answer from OpenAI`); } - logger.info(`Answer: ${answer}`, { tokenUsage }); - const metadata = { - groundTruths, - tokenUsage, - }; + const metadataString = createStructuredMetadata( + "ubiquity-os-llm-response", + logger.info(`Answer: ${answer}`, { + metadata: { + groundTruths, + tokenUsage, + }, + }) + ); - const metadataString = createStructuredMetadata("LLM Ground Truths and Token Usage", logger.info(`Answer: ${answer}`, { metadata })); - commentToPost = answer + metadataString; - await addCommentToIssue(context, commentToPost); + await addCommentToIssue(context, answer + metadataString); return { status: 200, reason: logger.info("Comment posted successfully").logMessage.raw }; } catch (error) { throw await bubbleUpErrorComment(context, error, false); From 98c683986132cd169fe7d7821995033fcbf97408 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:35:45 +0000 Subject: [PATCH 39/59] chore: list normal comment on PR, remove readme from comments --- src/helpers/issue-fetching.ts | 30 ++++++++---------------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 9802659..32cb164 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -60,28 +60,6 @@ export async function fetchLinkedIssues(params: FetchParams) { issueUrl: issue.html_url, }); - //Fetch the README of the repository - try { - const readme = await pullReadmeFromRepoForIssue(params); - if (readme) { - comments.push({ - body: readme, - user: issue.user, - id: issue.id.toString(), - org: params.owner, - repo: params.repo, - issueUrl: issue.html_url, - }); - } - } catch (error) { - params.context.logger.error(`Error fetching README`, { - err: error, - owner, - repo, - issue, - }); - } - for (const comment of comments) { const foundIssues = idIssueFromComment(comment.body, params); const foundCodes = comment.body ? 
await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; @@ -218,6 +196,14 @@ export async function fetchIssueComments(params: FetchParams) { pull_number: issueNum || payload.issue.number, }); reviewComments = response.data; + + const response2 = await octokit.rest.issues.listComments({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + + issueComments = response2.data; } else { const response = await octokit.rest.issues.listComments({ owner: owner || payload.repository.owner.login, From d727f796c4293206f90441795012b6101fd28a6e Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:39:19 +0000 Subject: [PATCH 40/59] chore: readme block section, fetch only for current issue repo --- src/helpers/format-chat-history.ts | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 6832355..3826948 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -2,7 +2,7 @@ import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/llm"; import { createKey, streamlineComments } from "../handlers/comments"; import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; -import { splitKey } from "./issue"; +import { pullReadmeFromRepoForIssue, splitKey } from "./issue"; export async function formatChatHistory( context: Context, @@ -76,7 +76,6 @@ async function createContextBlockSection({ currentContextTokenCount: number; }): Promise<[number, string]> { let comments = streamlined[key]; - if (!comments || comments.length === 0) { const [owner, repo, number] = splitKey(key); const { comments: fetchedComments } = await fetchIssueComments({ @@ -85,6 +84,7 @@ async function createContextBlockSection({ repo, issueNum: parseInt(number), }); + comments = streamlineComments(fetchedComments)[key]; } @@ -119,9 +119,19 @@ async function createContextBlockSection({ if (commentSection) { block = [specBlock.join(""), createHeader(blockHeader, key), commentSection, specOrBody, createFooter(blockHeader, key)]; } else { + // in this scenario we have no task/PR conversation, just the spec block = [specBlock.join("")]; } + // only inject the README if this is the current issue as that's likely most relevant + if (isCurrentIssue) { + const readme = await pullReadmeFromRepoForIssue({ context, owner: org, repo }); + if (readme) { + const readmeBlock = readme ? 
[createHeader("README", key), createSpecOrBody(readme), createFooter("README", key)] : []; + block = block.concat(readmeBlock); + } + } + if (!prDiff) { currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(block.join("")); return [currentContextTokenCount, block.join("")]; @@ -156,6 +166,7 @@ function createComment(comment: StreamlinedComments, specOrBody: string) { const formattedComments = comment.comments.map((c) => `${c.id} ${c.user}: ${c.body}\n`); + if (formattedComments.length === 0) { return; } From 0a63c346112481da1ff1dd7eff617230b02ae31a Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:02:55 +0000 Subject: [PATCH 41/59] chore: askGpt > askLlm --- src/handlers/ask-llm.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index b76c36f..ee31f61 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -25,7 +25,7 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - return await askGpt(context, question, formattedChat); + return await askLlm(context, question, formattedChat); } /** @@ -35,7 +35,7 @@ export async function askQuestion(context: Context, question: string) { * @param formattedChat - The formatted chat history to provide context to GPT * @returns completions - The completions generated by GPT **/ -export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { +export async function askLlm(context: Context, question: string, formattedChat: string[]): Promise { const { env: { UBIQUITY_OS_APP_NAME }, config: { model, similarityThreshold, maxTokens }, From e2f6fad44708b0459e70e2fa6f6f72a31a2ced7e Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:03:37 +0000 Subject: [PATCH 42/59] chore: return empty array not a throw --- src/handlers/ground-truths/chat-bot.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/handlers/ground-truths/chat-bot.ts b/src/handlers/ground-truths/chat-bot.ts index 6d087d2..de32e8c 100644 --- a/src/handlers/ground-truths/chat-bot.ts +++ b/src/handlers/ground-truths/chat-bot.ts @@ -68,6 +68,7 @@ export async function fetchRepoLanguageStats(context: Context) { return Array.from(Object.entries(stats)).sort((a, b) => b[1] - a[1]); } catch (err) { - throw logger.error(`Error fetching language stats for ${owner}/${repo}`, { err }); + logger.error(`Error fetching language stats for ${owner}/${repo}`, { err }); + return []; } } From 9784ec510808a26033c05d8788fc66197b7a4efc Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:04:34 +0000 Subject: [PATCH 43/59] chore: format, ctx window formatting fixes --- src/helpers/format-chat-history.ts | 23 +++++++++++------------ src/helpers/issue-fetching.ts | 9 +-------- src/types/llm.ts | 4 ++-- 3 files changed, 14 insertions(+), 22 deletions(-) diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 3826948..380057b 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -117,10 +117,10 @@ async function createContextBlockSection({ let block; if (commentSection) { - block = [specBlock.join(""), createHeader(blockHeader, key), commentSection, 
specOrBody, createFooter(blockHeader, key)]; + block = [specBlock.join("\n"), createHeader(blockHeader, key), commentSection, createFooter(blockHeader, key)]; } else { // in this scenario we have no task/PR conversation, just the spec - block = [specBlock.join("")]; + block = [specBlock.join("\n")]; } // only inject the README if this is the current issue as that's likely most relevant @@ -134,20 +134,24 @@ async function createContextBlockSection({ if (!prDiff) { currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(block.join("")); - return [currentContextTokenCount, block.join("")]; + return [currentContextTokenCount, block.join("\n")]; } - const blockWithDiff = [block.join(""), createHeader(`Pull Request Diff`, key), prDiff, createFooter(`Pull Request Diff`, key)]; + const blockWithDiff = [block.join("\n"), createHeader(`Pull Request Diff`, key), prDiff, createFooter(`Pull Request Diff`, key)]; currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(blockWithDiff.join("")); - return [currentContextTokenCount, blockWithDiff.join("")]; + return [currentContextTokenCount, blockWithDiff.join("\n")]; } function createHeader(content: string, repoString: string) { - return `=== ${content} === ${repoString} ===\n\n`; + return `=== ${content} === ${repoString} ===\n`; } function createFooter(content: string, repoString: string) { - return `=== End ${content} === ${repoString} ===\n\n`; + return `=== End ${content} === ${repoString} ===\n`; +} + +function createSpecOrBody(specOrBody: string) { + return `${specOrBody}\n`; } function createComment(comment: StreamlinedComments, specOrBody: string) { @@ -166,13 +170,8 @@ function createComment(comment: StreamlinedComments, specOrBody: string) { const formattedComments = comment.comments.map((c) => `${c.id} ${c.user}: ${c.body}\n`); - if (formattedComments.length === 0) { return; } return formattedComments.join(""); } - -function createSpecOrBody(specOrBody: string) { - return `${specOrBody}\n`; -} diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 32cb164..8ed7a02 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -3,14 +3,7 @@ import { Context } from "../types"; import { IssueComments, FetchParams, Issue, LinkedIssues, LinkedPullsToIssue, ReviewComments, SimplifiedComment } from "../types/github-types"; import { StreamlinedComment } from "../types/llm"; import { logger } from "./errors"; -import { - dedupeStreamlinedComments, - fetchCodeLinkedFromIssue, - idIssueFromComment, - mergeStreamlinedComments, - pullReadmeFromRepoForIssue, - splitKey, -} from "./issue"; +import { dedupeStreamlinedComments, fetchCodeLinkedFromIssue, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { handleIssue, handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; /** diff --git a/src/types/llm.ts b/src/types/llm.ts index f01a70d..f05e985 100644 --- a/src/types/llm.ts +++ b/src/types/llm.ts @@ -4,8 +4,8 @@ export type ModelApplications = "code-review" | "chat-bot"; type ChatBotAppParams = { languages: [string, number][]; - dependencies: Record; - devDependencies: Record; + dependencies: Record | null; + devDependencies: Record | null; }; type CodeReviewAppParams = { From e20a2c5e78dc1774180c87d8f55018605b208be0 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:48:13 +0000 Subject: [PATCH 44/59] chore: fix tests --- 
src/adapters/openai/helpers/completions.ts | 5 +- src/handlers/ask-llm.ts | 3 +- src/plugin.ts | 4 +- tests/main.test.ts | 71 +++++++++++----------- 4 files changed, 40 insertions(+), 43 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 18e8e33..48280d1 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -36,9 +36,8 @@ export class Completions extends SuperOpenAi { logger.info(`Number of tokens: ${numTokens}`); const sysMsg = [ - "You Must obey the following ground truths: [", - groundTruths.join(":"), - "]\n", + "You Must obey the following ground truths: ", + JSON.stringify(groundTruths), "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. 
We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.", "Your name is : ", botName, diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index ee31f61..cad5c87 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -6,7 +6,7 @@ import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; import { formatChatHistory } from "../helpers/format-chat-history"; import { fetchRepoDependencies, fetchRepoLanguageStats } from "./ground-truths/chat-bot"; import { findGroundTruths } from "./ground-truths/find-ground-truths"; -import { bubbleUpErrorComment } from "../helpers/errors"; +import { bubbleUpErrorComment, logger } from "../helpers/errors"; /** * Asks a question to GPT and returns the response @@ -25,6 +25,7 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); + logger.info(`${formattedChat.join("")}`); return await askLlm(context, question, formattedChat); } diff --git a/src/plugin.ts b/src/plugin.ts index 284b3cf..3bc13c8 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,13 +1,13 @@ import { Octokit } from "@octokit/rest"; import { PluginInputs } from "./types"; import { Context } from "./types"; -import { LogLevel, Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Env } from "./types/env"; import { createAdapters } from "./adapters"; import { createClient } from "@supabase/supabase-js"; import { VoyageAIClient } from "voyageai"; import OpenAI from "openai"; import { proxyCallbacks } from "./helpers/callback-proxy"; +import { logger } from "./helpers/errors"; export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); @@ -26,7 +26,7 @@ export async function plugin(inputs: PluginInputs, env: Env) { config: inputs.settings, octokit, env, - logger: new Logs("info" as LogLevel), + logger, adapters: {} as ReturnType, }; context.adapters = createAdapters(supabase, voyageClient, openaiClient, context); diff --git a/tests/main.test.ts b/tests/main.test.ts index 3ceaeb5..5f67bd9 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -2,7 +2,6 @@ import { db } from "./__mocks__/db"; import { server } from "./__mocks__/node"; import usersGet from "./__mocks__/users-get.json"; import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it } from "@jest/globals"; -import { Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Context, SupportedEventsU } from "../src/types"; import { drop } from "@mswjs/data"; import issueTemplate from "./__mocks__/issue-template"; @@ -12,6 +11,7 @@ import { runPlugin } from "../src/plugin"; import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; import { envSchema } from "../src/types/env"; import { CompletionsType } from "../src/adapters/openai/helpers/completions"; +import { logger } from "../src/helpers/errors"; const TEST_QUESTION = "what is pi?"; const TEST_SLASH_COMMAND = "@UbiquityOS what is 
pi?"; @@ -52,6 +52,7 @@ afterAll(() => server.close()); describe("Ask plugin tests", () => { beforeEach(async () => { + jest.clearAllMocks(); await setupTests(); }); @@ -102,23 +103,6 @@ describe("Ask plugin tests", () => { }); it("should construct the chat history correctly", async () => { - const ctx = createContext(TEST_SLASH_COMMAND); - const infoSpy = jest.spyOn(ctx.logger, "info"); - createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - await runPlugin(ctx); - - expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); - expect(infoSpy).toHaveBeenNthCalledWith(4, "Answer: This is a mock answer for the chat", { - caller: LOG_CALLER, - tokenUsage: { - input: 1000, - output: 150, - total: 1150, - }, - }); - }); - - it("should collect the linked issues correctly", async () => { const ctx = createContext(TEST_SLASH_COMMAND); const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([ @@ -129,46 +113,59 @@ describe("Ask plugin tests", () => { ]); await runPlugin(ctx); - - expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); - - const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === + const prompt = `=== Current Task Specification === ubiquity/test-repo/1 === This is a demo spec for a demo task just perfect for testing. - === End Current Issue #1 Specification === - === Current Issue #1 Conversation === ubiquity/test-repo #1 === + === End Current Task Specification === ubiquity/test-repo/1 === + + === Current Task Conversation === ubiquity/test-repo/1 === 1 ubiquity: ${ISSUE_ID_2_CONTENT} [#2](https://www.github.com/ubiquity/test-repo/issues/2) 2 ubiquity: ${TEST_QUESTION} [#1](https://www.github.com/ubiquity/test-repo/issues/1) - === End Current Issue #1 Conversation === + === End Current Task Conversation === ubiquity/test-repo/1 === + + === README === ubiquity/test-repo/1 === + + {"content":"This is a mock README file"} + + === End README === ubiquity/test-repo/1 === - === Linked Issue #2 Specification === ubiquity/test-repo/2 === + === Linked Task Specification === ubiquity/test-repo/2 === Related to issue #3 - === End Linked Issue #2 Specification === + === End Linked Task Specification === ubiquity/test-repo/2 === - === Linked Issue #2 Conversation === ubiquity/test-repo #2 === + === Linked Task Conversation === ubiquity/test-repo/2 === 3 ubiquity: ${ISSUE_ID_3_CONTENT} [#3](https://www.github.com/ubiquity/test-repo/issues/3) - === End Linked Issue #2 Conversation === + === End Linked Task Conversation === ubiquity/test-repo/2 === - === Linked Issue #3 Specification === ubiquity/test-repo/3 === + === Linked Task Specification === ubiquity/test-repo/3 === Just another issue - === End Linked Issue #3 Specification === + === End Linked Task Specification === ubiquity/test-repo/3 === - === Linked Issue #3 Conversation === ubiquity/test-repo #3 === + === Linked Task Conversation === ubiquity/test-repo/3 === 4 ubiquity: Just a comment [#1](https://www.github.com/ubiquity/test-repo/issues/1) - 4 ubiquity: Just a comment [#1](https://www.github.com/ubiquity/test-repo/issues/1) - === End Linked Issue #3 Conversation ===\n - `; + === End Linked Task Conversation === ubiquity/test-repo/3 ===`; const normalizedExpected = normalizeString(prompt); - const normalizedReceived = normalizeString(infoSpy.mock.calls[1][0] as string); + const normalizedReceived = normalizeString(infoSpy.mock.calls[0][0] as string); 
expect(normalizedReceived).toEqual(normalizedExpected); + expect(infoSpy).toHaveBeenNthCalledWith(2, "Answer: This is a mock answer for the chat", { + caller: LOG_CALLER, + metadata: { + tokenUsage: { + input: 1000, + output: 150, + total: 1150, + }, + groundTruths: ["This is a mock answer for the chat"], + }, + }); }); }); @@ -266,7 +263,7 @@ function createContext(body = TEST_SLASH_COMMAND) { }, owner: "ubiquity", repo: "test-repo", - logger: new Logs("debug"), + logger: logger, config: {}, env: { UBIQUITY_OS_APP_NAME: "UbiquityOS", From f1e5ba7d479fdd54132b7e686da1276dd560509f Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:53:34 +0000 Subject: [PATCH 45/59] chore: sysMsg formatting fix --- src/adapters/openai/helpers/completions.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 48280d1..5a0c06e 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -37,10 +37,9 @@ export class Completions extends SuperOpenAi { const sysMsg = [ "You Must obey the following ground truths: ", - JSON.stringify(groundTruths), + JSON.stringify(groundTruths) + "\n", "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. 
We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.", - "Your name is : ", - botName, + `Your name is: ${botName}`, "\n", "Main Context (Provide additional precedence in terms of information): ", localContext.join("\n"), From e81745409e7aa99e0c44455172005729deaf95f9 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:12:34 +0000 Subject: [PATCH 46/59] chore: hardcode model token limits --- src/adapters/openai/helpers/completions.ts | 39 +++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 5a0c06e..52299e8 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -23,6 +23,42 @@ export class Completions extends SuperOpenAi { this.context = context; } + getModelMaxTokenLimit(model: string): number { + // could be made more robust, unfortunately, there's no endpoint to get the model token limit + const tokenLimits = new Map([ + ["o1-mini", 128_000], + ["o1-preview", 128_000], + ["gpt-4-turbo", 128_000], + ["gpt-4o", 128_000], + ["gpt-4o-mini", 128_000], + ["gpt-4", 8_192], + ["gpt-3.5-turbo-0125", 16_385], + ["gpt-3.5-turbo", 16_385], + ]); + + return tokenLimits.get(model) || 128_000; + } + + getModelMaxOutputLimit(model: string): number { + // could be made more robust, unfortunately, there's no endpoint to get the model token limit + const tokenLimits = new Map([ + ["o1-mini", 65_536], + ["o1-preview", 32_768], + ["gpt-4-turbo", 4_096], + ["gpt-4o-mini", 16_384], + ["gpt-4o", 16_384], + ["gpt-4", 8_192], + ["gpt-3.5-turbo-0125", 4_096], + ["gpt-3.5-turbo", 4_096], + ]); + + return tokenLimits.get(model) || 16_384; + } + + async getModelTokenLimit(): Promise { + return this.getModelMaxTokenLimit("o1-mini"); + } + async createCompletion( query: string, model: string = "o1-mini", @@ -81,6 +117,7 @@ export class Completions extends SuperOpenAi { type: "text", }, }); + const answer = res.choices[0].message; if (answer && answer.content && res.usage) { return { @@ -128,6 +165,6 @@ export class Completions extends SuperOpenAi { } async findTokenLength(prompt: string, additionalContext: string[] = [], localContext: string[] = [], groundTruths: string[] = []): Promise { - return encode(prompt + additionalContext.join("\n") + localContext.join("\n") + groundTruths.join("\n")).length; + return encode(prompt + additionalContext.join("\n") + localContext.join("\n") + groundTruths.join("\n"), { disallowedSpecial: new Set() }).length; } } From 26f17097bc7ef77857cb17e474050e7aafa63664 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:13:08 +0000 Subject: [PATCH 47/59] chore: hardcode no language/deps responses --- src/handlers/ask-llm.ts | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index cad5c87..c487420 100644 --- a/src/handlers/ask-llm.ts +++ 
b/src/handlers/ask-llm.ts @@ -62,8 +62,27 @@ export async function askLlm(context: Context, question: string, formattedChat: const rerankedText = similarText.length > 0 ? await reranker.reRankResults(similarText, question) : []; const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); - const groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); - return completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); + + let groundTruths: string[] = []; + + if (!languages.length) { + groundTruths.push("No languages found in the repository"); + } + + if (!Reflect.ownKeys(dependencies).length) { + groundTruths.push("No dependencies found in the repository"); + } + + if (!Reflect.ownKeys(devDependencies).length) { + groundTruths.push("No devDependencies found in the repository"); + } + + if (groundTruths.length === 3) { + return await completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); + } + + groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); + return await completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); } catch (error) { throw bubbleUpErrorComment(context, error, false); } From c5d4bebf1a920ddd0fd940e39d4224718aafbb3e Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:13:47 +0000 Subject: [PATCH 48/59] chore: readability --- src/handlers/comments.ts | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index b033686..8d1418e 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -10,11 +10,14 @@ import { StreamlinedComment } from "../types/llm"; */ export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { const streamlinedComments: Record = {}; + for (const issue of linkedIssues) { const linkedIssueComments = issue.comments || []; if (linkedIssueComments.length === 0) continue; + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); if (!linkedStreamlinedComments) continue; + for (const [key, value] of Object.entries(linkedStreamlinedComments)) { streamlinedComments[key] = [...(streamlinedComments[key] || []), ...value]; } @@ -74,15 +77,15 @@ export function createKey(issueUrl: string, issue?: number) { */ export function streamlineComments(comments: SimplifiedComment[]) { const streamlined: Record = {}; + for (const comment of comments) { const { user, issueUrl: url, body } = comment; - // Skip bot comments if (user?.type === "Bot") continue; + const key = createKey(url); const [owner, repo] = splitKey(key); - if (!streamlined[key]) { - streamlined[key] = []; - } + streamlined[key] ??= []; + if (user && body) { streamlined[key].push({ user: user.login, From d4b5a900f49ed830d4004ba704a7ff5b61c0db6b Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:20:14 +0000 Subject: [PATCH 49/59] chore: ignore pr template html hashMatch --- src/helpers/issue.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index a67eb8a..2c4697b 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -76,6 +76,10 @@ export 
function idIssueFromComment(comment?: string | null, params?: FetchParams if (hashMatch && hashMatch.length > 0) { hashMatch.forEach((hash) => { const issueNumber = hash.replace("#", ""); + // the HTML comment in the PR template + if (issueNumber === "1234" && comment?.includes("You must link the issue number e.g.")) { + return; + } const owner = params?.context.payload.repository?.owner?.login || ""; const repo = params?.context.payload.repository?.name || ""; response.push({ body: undefined, owner, repo, issueNumber: parseInt(issueNumber), url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` }); From 34b7ab8d8b78d5abc349f69014884c27676b42a7 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:27:25 +0000 Subject: [PATCH 50/59] chore: token handling --- src/helpers/format-chat-history.ts | 59 +++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 18 deletions(-) diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 380057b..84855c4 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -1,8 +1,9 @@ import { Context } from "../types"; -import { StreamlinedComment, StreamlinedComments } from "../types/llm"; +import { StreamlinedComment, StreamlinedComments, TokenLimits } from "../types/llm"; import { createKey, streamlineComments } from "../handlers/comments"; import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; import { pullReadmeFromRepoForIssue, splitKey } from "./issue"; +import { logger } from "./errors"; export async function formatChatHistory( context: Context, @@ -10,24 +11,37 @@ export async function formatChatHistory( specAndBodies: Record ): Promise { const keys = new Set([...Object.keys(streamlined), ...Object.keys(specAndBodies), createKey(context.payload.issue.html_url)]); - let runningTokenCount = 0; + const tokenLimits: TokenLimits = { + modelMaxTokenLimit: context.adapters.openai.completions.getModelMaxTokenLimit(context.config.model), + maxCompletionTokens: context.config.maxTokens || context.adapters.openai.completions.getModelMaxOutputLimit(context.config.model), + runningTokenCount: 0, + tokensRemaining: 0, + }; + + // minus the output tokens we have this many tokens to use + tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens; const chatHistory = await Promise.all( - Array.from(keys).map(async (key) => { + Array.from(keys).map(async (key, i) => { + if (tokenLimits.tokensRemaining < 0) { + logger.error(`Ran out of tokens at block ${i}`); + return ""; + } const [currentTokenCount, result] = await createContextBlockSection({ context, key, streamlined, specAndBodies, isCurrentIssue: key === createKey(context.payload.issue.html_url), - currentContextTokenCount: runningTokenCount, + tokenLimits, }); - runningTokenCount += currentTokenCount; + tokenLimits.runningTokenCount = currentTokenCount; + tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens - currentTokenCount; return result; }) ); - return Array.from(new Set(chatHistory)); + return Array.from(new Set(chatHistory)).filter((x): x is string => !!x); } function getCorrectHeaderString(prDiff: string | null, isCurrentIssue: boolean, isConvo: boolean) { @@ -66,14 +80,14 @@ async function createContextBlockSection({ streamlined, specAndBodies, isCurrentIssue, - currentContextTokenCount, + tokenLimits, }: { context: Context; key: string; streamlined: Record; 
specAndBodies: Record; isCurrentIssue: boolean; - currentContextTokenCount: number; + tokenLimits: TokenLimits; }): Promise<[number, string]> { let comments = streamlined[key]; if (!comments || comments.length === 0) { @@ -94,8 +108,7 @@ async function createContextBlockSection({ throw context.logger.error("Issue number is not valid"); } - const prDiff = await fetchPullRequestDiff(context, org, repo, issueNumber); - + const { diff } = await fetchPullRequestDiff(context, org, repo, issueNumber, tokenLimits); let specOrBody = specAndBodies[key]; if (!specOrBody) { specOrBody = @@ -109,8 +122,8 @@ async function createContextBlockSection({ )?.body || "No specification or body available"; } - const specHeader = getCorrectHeaderString(prDiff, isCurrentIssue, false); - const blockHeader = getCorrectHeaderString(prDiff, isCurrentIssue, true); + const specHeader = getCorrectHeaderString(diff, isCurrentIssue, false); + const blockHeader = getCorrectHeaderString(diff, isCurrentIssue, true); const specBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader, key)]; const commentSection = createComment({ issueNumber, repo, org, comments }, specOrBody); @@ -132,15 +145,25 @@ async function createContextBlockSection({ } } - if (!prDiff) { - currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(block.join("")); - return [currentContextTokenCount, block.join("\n")]; + if (!diff) { + return [await context.adapters.openai.completions.findTokenLength(block.join("")), block.join("\n")]; } - const blockWithDiff = [block.join("\n"), createHeader(`Pull Request Diff`, key), prDiff, createFooter(`Pull Request Diff`, key)]; - currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(blockWithDiff.join("")); - return [currentContextTokenCount, blockWithDiff.join("\n")]; + const blockWithDiff = [block.join("\n"), createHeader(`Pull Request Diff`, key), diff, createFooter(`Pull Request Diff`, key)]; + return [await context.adapters.openai.completions.findTokenLength(blockWithDiff.join("")), blockWithDiff.join("\n")]; +} + +/** + * Might not need to splice from the formatted window +function removeSections(fullText: string, header: string, footer: string): string { + const regex = new RegExp(`${escapeRegExp(header)}[\\s\\S]*?${escapeRegExp(footer)}`, 'g'); + return fullText.replace(regex, '').trim(); +} + +function escapeRegExp(text: string): string { + return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); } + */ function createHeader(content: string, repoString: string) { return `=== ${content} === ${repoString} ===\n`; From b877526f95397c7b248bdb2cdd40b7556c515fad Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:28:44 +0000 Subject: [PATCH 51/59] chore: diff fetch err handling --- src/helpers/issue-fetching.ts | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 8ed7a02..101ddcd 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -1,10 +1,11 @@ import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; import { IssueComments, FetchParams, Issue, LinkedIssues, LinkedPullsToIssue, ReviewComments, SimplifiedComment } from "../types/github-types"; -import { StreamlinedComment } from "../types/llm"; +import { StreamlinedComment, TokenLimits } from "../types/llm"; import { logger } 
from "./errors"; import { dedupeStreamlinedComments, fetchCodeLinkedFromIssue, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { handleIssue, handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; +import { processPullRequestDiff } from "./pull-request-parsing"; /** * Recursively fetches linked issues and processes them, including fetching comments and specifications. @@ -56,12 +57,13 @@ export async function fetchLinkedIssues(params: FetchParams) { for (const comment of comments) { const foundIssues = idIssueFromComment(comment.body, params); const foundCodes = comment.body ? await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; + if (foundIssues) { for (const linkedIssue of foundIssues) { const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); if (seen.has(linkedKey)) continue; - seen.add(linkedKey); + const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ context: params.context, issueNum: linkedIssue.issueNumber, @@ -125,22 +127,25 @@ export async function mergeCommentsAndFetchSpec( } } -export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number, tokenLimits: TokenLimits) { const { octokit } = context; + let diff: string; try { - const diff = await octokit.pulls.get({ + const diffResponse = await octokit.pulls.get({ owner: org, repo, pull_number: issue, - mediaType: { - format: "diff", - }, + mediaType: { format: "diff" }, }); - return diff.data as unknown as string; + + diff = diffResponse.data as unknown as string; } catch (e) { - return null; + logger.error(`Error fetching PR data`, { owner: org, repo, issue, err: String(e) }); + return { diff: null }; } + + return await processPullRequestDiff(diff, tokenLimits); } /** From 97868df8aec9b18fb16773006336b4a4ae81aa94 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:29:29 +0000 Subject: [PATCH 52/59] chore: pr parsing --- src/helpers/pull-request-parsing.ts | 244 ++++++++++++++++++++++++++++ 1 file changed, 244 insertions(+) create mode 100644 src/helpers/pull-request-parsing.ts diff --git a/src/helpers/pull-request-parsing.ts b/src/helpers/pull-request-parsing.ts new file mode 100644 index 0000000..fc61bbf --- /dev/null +++ b/src/helpers/pull-request-parsing.ts @@ -0,0 +1,244 @@ +import { encode } from "gpt-tokenizer"; +import { TokenLimits } from "../types/llm"; +import { logger } from "./errors"; +import { EncodeOptions } from "gpt-tokenizer/esm/GptEncoding"; + +export async function processPullRequestDiff(diff: string, tokenLimits: TokenLimits) { + const { runningTokenCount, tokensRemaining } = tokenLimits; + + const perFileDiffs = parsePerFileDiffs(diff); + + const essentialFileDiffs = perFileDiffs.filter(({ filename }) => { + return isEssentialFile(filename); + }); + + const estimatedFileDiffStats = essentialFileDiffs.map(({ filename, diffContent }) => { + const estimatedTokenCount = Math.ceil(diffContent.length / 3.5); + return { filename, estimatedTokenCount, diffContent }; + }); + + estimatedFileDiffStats.sort((a, b) => a.estimatedTokenCount - b.estimatedTokenCount); // Smallest first + + let currentTokenCount = runningTokenCount; + const includedFileDiffs = []; + + for (const file of estimatedFileDiffStats) { + if (currentTokenCount + file.estimatedTokenCount > tokensRemaining) { + 
logger.info(`Skipping ${file.filename} to stay within token limits.`); + continue; + } + includedFileDiffs.push(file); + currentTokenCount += file.estimatedTokenCount; + } + + if (includedFileDiffs.length === 0) { + logger.error(`Cannot include any files from diff without exceeding token limits.`); + return { diff: null }; + } + + const accurateFileDiffStats = await Promise.all( + includedFileDiffs.map(async (file) => { + const tokenCountArray = await encodeAsync(file.diffContent, { disallowedSpecial: new Set() }); + const tokenCount = tokenCountArray.length; + return { ...file, tokenCount }; + }) + ); + + currentTokenCount = accurateFileDiffStats.reduce((sum, file) => sum + file.tokenCount, runningTokenCount); + + while (currentTokenCount > tokensRemaining && accurateFileDiffStats.length > 0) { + const removedFile = accurateFileDiffStats.pop(); + currentTokenCount -= removedFile?.tokenCount || 0; + logger.info(`Excluded ${removedFile?.filename || "Unknown filename"} after accurate token count exceeded limits.`); + } + + if (accurateFileDiffStats.length === 0) { + logger.error(`Cannot include any files from diff after accurate token count calculation.`); + return { diff: null }; + } + + const currentDiff = accurateFileDiffStats.map((file) => file.diffContent).join("\n"); + + return { diff: currentDiff }; +} + +export async function encodeAsync(text: string, options: EncodeOptions): Promise { + return new Promise((resolve) => { + const result = encode(text, options); + resolve(result); + }); +} + +export function parsePerFileDiffs(diff: string): { filename: string; diffContent: string }[] { + const diffPattern = /^diff --git a\/(.*?) b\/.*$/gm; + let match: RegExpExecArray | null; + const perFileDiffs = []; + let lastIndex = 0; + + while ((match = diffPattern.exec(diff)) !== null) { + const filename = match[1]; + const startIndex = match.index; + + if (perFileDiffs.length > 0) { + perFileDiffs[perFileDiffs.length - 1].diffContent = diff.substring(lastIndex, startIndex).trim(); + } + perFileDiffs.push({ filename, diffContent: "" }); + lastIndex = startIndex; + } + if (perFileDiffs.length > 0 && lastIndex < diff.length) { + perFileDiffs[perFileDiffs.length - 1].diffContent = diff.substring(lastIndex).trim(); + } + + return perFileDiffs; +} + +function isEssentialFile(filename: string): boolean { + const nonEssentialExtensions = [ + // Image files + ".png", + ".jpg", + ".jpeg", + ".gif", + ".bmp", + ".tiff", + ".svg", + ".ico", + ".psd", + ".ai", + ".eps", + + // Video files + ".mp4", + ".avi", + ".mov", + ".wmv", + ".flv", + ".mkv", + ".webm", + ".mpeg", + ".mpg", + ".m4v", + + // Audio files + ".mp3", + ".wav", + ".flac", + ".aac", + ".ogg", + ".wma", + ".m4a", + ".aiff", + ".ape", + + // Document files + ".pdf", + ".doc", + ".docx", + ".xls", + ".xlsx", + ".ppt", + ".pptx", + ".odt", + ".ods", + ".odp", + + // Archive files + ".zip", + ".rar", + ".7z", + ".tar", + ".gz", + ".bz2", + ".xz", + ".lz", + ".z", + + // Executable and binary files + ".exe", + ".dll", + ".so", + ".dylib", + ".bin", + ".class", + ".jar", + ".war", + ".ear", + ".msi", + ".apk", + ".ipa", + + // Compiled object files + ".o", + ".obj", + ".pyc", + ".pyo", + ".pyd", + ".lib", + ".a", + ".dSYM", + + // System and temporary files + ".sys", + ".tmp", + ".bak", + ".old", + ".swp", + ".swo", + ".lock", + ".cfg", + ".ini", + + // Database files + ".db", + ".sqlite", + ".sqlite3", + ".mdb", + ".accdb", + ".dbf", + ".frm", + ".myd", + ".myi", + + // Font files + ".ttf", + ".otf", + ".woff", + ".woff2", + ".eot", + + // Backup 
and miscellaneous files + ".log", + ".bak", + ".orig", + ".sav", + ".save", + ".dump", + + // Other non-essential files + ".crt", + ".pem", + ".key", + ".csr", + ".der", // Certificate files + ".plist", + ".mobileprovision", // iOS specific files + ".icns", // macOS icon files + ".ds_store", + "thumbs.db", + "desktop.ini", // System files + + // Generated files + ".map", + ".min.js", + ".d.ts", + ".map.js", + ".map.css", + ".bundle.js", + ".bundle.css", + ".bundle.js.map", + ".bundle.css.map", + ".bundle.min.js", + ]; + + return !nonEssentialExtensions.some((ext) => filename.toLowerCase().endsWith(ext)); +} \ No newline at end of file From 398e9930eb134963d98d832e4bbd54196c5ec448 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:30:02 +0000 Subject: [PATCH 53/59] chore: type, tests, cspell --- .cspell.json | 6 +++++- src/adapters/openai/helpers/completions.ts | 2 +- src/handlers/ask-llm.ts | 2 +- src/types/llm.ts | 7 +++++++ tests/__mocks__/handlers.ts | 19 +++++++++++++++--- tests/main.test.ts | 23 ++++++++++++++++++++++ 6 files changed, 53 insertions(+), 6 deletions(-) diff --git a/.cspell.json b/.cspell.json index 65d0f95..06b57b6 100644 --- a/.cspell.json +++ b/.cspell.json @@ -34,7 +34,11 @@ "Typeguard", "typeguards", "OPENROUTER_API_KEY", - "Openrouter" + "Openrouter", + "flac", + "dylib", + "mobileprovision", + "icns" ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 52299e8..dfd2bcb 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -83,7 +83,7 @@ export class Completions extends SuperOpenAi { additionalContext.join("\n"), ].join("\n"); - logger.info(`System message: ${sysMsg}`); + // logger.info(`System message: ${sysMsg}`); logger.info(`Query: ${query}`); const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index c487420..0a48f46 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -25,7 +25,7 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - logger.info(`${formattedChat.join("")}`); + // logger.info(`${formattedChat.join("")}`); return await askLlm(context, question, formattedChat); } diff --git a/src/types/llm.ts b/src/types/llm.ts index f05e985..7d5bedf 100644 --- a/src/types/llm.ts +++ b/src/types/llm.ts @@ -51,3 +51,10 @@ export type StreamlinedComments = { org: string; comments: StreamlinedComment[]; }; + +export type TokenLimits = { + modelMaxTokenLimit: number; + maxCompletionTokens: number; + runningTokenCount: number; + tokensRemaining: number; +}; diff --git a/tests/__mocks__/handlers.ts b/tests/__mocks__/handlers.ts index be7ba62..2c2141b 100644 --- a/tests/__mocks__/handlers.ts +++ b/tests/__mocks__/handlers.ts @@ -85,9 +85,7 @@ export const handlers = [ db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pullNumber) } } }) ) ), - http.get("https://api.github.com/repos/:owner/:repo/languages", ({ params: { owner, repo } }) => - HttpResponse.json(db.repo.findFirst({ 
where: { owner: { login: { equals: owner as string } }, name: { equals: repo as string } } })) - ), + http.get("https://api.github.com/repos/:owner/:repo/languages", () => HttpResponse.json(["JavaScript", "TypeScript", "Python"])), http.get("https://api.github.com/repos/:owner/:repo/contents/:path", () => HttpResponse.json({ type: "file", @@ -97,4 +95,19 @@ export const handlers = [ content: Buffer.from(JSON.stringify({ content: "This is a mock README file" })).toString("base64"), }) ), + // [MSW] Warning: intercepted a request without a matching request handler: + + // • GET https://api.github.com/repos/ubiquity/test-repo/pulls/3/files?per_page=100?per_page=100 + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/files", () => + HttpResponse.json([ + { + sha: "abc123", + filename: "file1.txt", + status: "modified", + additions: 10, + deletions: 5, + changes: 15, + }, + ]) + ), ]; diff --git a/tests/main.test.ts b/tests/main.test.ts index 5f67bd9..64c1063 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -39,6 +39,23 @@ type Comment = { const octokit = jest.requireActual("@octokit/rest"); jest.requireActual("openai"); +// extractDependencies + +jest.mock("../src/handlers/ground-truths/chat-bot", () => { + return { + fetchRepoDependencies: jest.fn().mockReturnValue({ + dependencies: {}, + devDependencies: {}, + }), + extractDependencies: jest.fn(), + // [string, number][] + fetchRepoLanguageStats: jest.fn().mockReturnValue([ + ["JavaScript", 100], + ["TypeScript", 200], + ]), + }; +}); + beforeAll(() => { server.listen(); }); @@ -388,6 +405,12 @@ function createContext(body = TEST_SLASH_COMMAND) { }, openai: { completions: { + getModelMaxTokenLimit: () => { + return 50000; + }, + getModelMaxOutputLimit: () => { + return 50000; + }, createCompletion: async (): Promise => { return { answer: MOCK_ANSWER, From 0324dc9a46670b8164eb386cf3c563a089a09f1f Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 22:09:12 +0000 Subject: [PATCH 54/59] chore: remove jsdoc comments, add helpful comments --- src/adapters/openai/helpers/completions.ts | 3 +- src/handlers/ask-llm.ts | 26 ++++++-------- src/handlers/comment-created-callback.ts | 1 + src/helpers/format-chat-history.ts | 40 +++++++++++---------- src/helpers/issue-fetching.ts | 40 +++------------------ src/helpers/issue-handling.ts | 42 +--------------------- src/helpers/pull-request-parsing.ts | 19 +++++++++- 7 files changed, 59 insertions(+), 112 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index dfd2bcb..42b61db 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -83,7 +83,7 @@ export class Completions extends SuperOpenAi { additionalContext.join("\n"), ].join("\n"); - // logger.info(`System message: ${sysMsg}`); + logger.info(`System message: ${sysMsg}`); logger.info(`Query: ${query}`); const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ @@ -165,6 +165,7 @@ export class Completions extends SuperOpenAi { } async findTokenLength(prompt: string, additionalContext: string[] = [], localContext: string[] = [], groundTruths: string[] = []): Promise { + // disallowedSpecial: new Set() because we pass the entire diff as the prompt we should account for all special characters return encode(prompt + additionalContext.join("\n") + localContext.join("\n") + groundTruths.join("\n"), { disallowedSpecial: new 
Set() }).length; } } diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index 0a48f46..8f6790b 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -8,34 +8,23 @@ import { fetchRepoDependencies, fetchRepoLanguageStats } from "./ground-truths/c import { findGroundTruths } from "./ground-truths/find-ground-truths"; import { bubbleUpErrorComment, logger } from "../helpers/errors"; -/** - * Asks a question to GPT and returns the response - * @param context - The context object containing environment and configuration details - * @param question - The question to ask GPT - * @returns The response from GPT - * @throws If no question is provided - */ export async function askQuestion(context: Context, question: string) { if (!question) { - throw context.logger.error("No question provided"); + throw logger.error("No question provided"); } + // using any links in comments or issue/pr bodies to fetch more context const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context, owner: context.payload.repository.owner.login, repo: context.payload.repository.name, }); + // build a nicely structured system message containing a streamlined chat history + // includes the current issue, any linked issues, and any linked PRs const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - // logger.info(`${formattedChat.join("")}`); + logger.info(`${formattedChat.join("")}`); return await askLlm(context, question, formattedChat); } -/** - * Asks GPT a question and returns the completions - * @param context - The context object containing environment and configuration details - * @param question - The question to ask GPT - * @param formattedChat - The formatted chat history to provide context to GPT - * @returns completions - The completions generated by GPT - **/ export async function askLlm(context: Context, question: string, formattedChat: string[]): Promise { const { env: { UBIQUITY_OS_APP_NAME }, @@ -48,19 +37,24 @@ export async function askLlm(context: Context, question: string, formattedChat: } = context; try { + // using db functions to find similar comments and issues const [similarComments, similarIssues] = await Promise.all([ comment.findSimilarComments(question, 1 - similarityThreshold, ""), issue.findSimilarIssues(question, 1 - similarityThreshold, ""), ]); + // combine the similar comments and issues into a single array const similarText = [ ...(similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || []), ...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []), ]; + // filter out any empty strings formattedChat = formattedChat.filter((text) => text); + // rerank the similar text using voyageai const rerankedText = similarText.length > 0 ? 
await reranker.reRankResults(similarText, question) : []; + // gather structural data about the payload repository const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); let groundTruths: string[] = []; diff --git a/src/handlers/comment-created-callback.ts b/src/handlers/comment-created-callback.ts index b11c36c..a45770d 100644 --- a/src/handlers/comment-created-callback.ts +++ b/src/handlers/comment-created-callback.ts @@ -35,6 +35,7 @@ export async function issueCommentCreatedCallback( } const metadataString = createStructuredMetadata( + // don't change this header, it's used for tracking "ubiquity-os-llm-response", logger.info(`Answer: ${answer}`, { metadata: { diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 84855c4..dc38288 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -10,6 +10,7 @@ export async function formatChatHistory( streamlined: Record, specAndBodies: Record ): Promise { + // At this point really we should have all the context we can obtain but we try again just in case const keys = new Set([...Object.keys(streamlined), ...Object.keys(specAndBodies), createKey(context.payload.issue.html_url)]); const tokenLimits: TokenLimits = { modelMaxTokenLimit: context.adapters.openai.completions.getModelMaxTokenLimit(context.config.model), @@ -18,11 +19,14 @@ export async function formatChatHistory( tokensRemaining: 0, }; - // minus the output tokens we have this many tokens to use + // what we start out with tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens; + // careful adding any more API calls here as it's likely to hit the secondary rate limit const chatHistory = await Promise.all( + // keys are owner/repo/issueNum; so for each issue, we want to create a block Array.from(keys).map(async (key, i) => { + // if we run out of tokens, we should stop if (tokenLimits.tokensRemaining < 0) { logger.error(`Ran out of tokens at block ${i}`); return ""; @@ -35,6 +39,7 @@ export async function formatChatHistory( isCurrentIssue: key === createKey(context.payload.issue.html_url), tokenLimits, }); + // update the token count tokenLimits.runningTokenCount = currentTokenCount; tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens - currentTokenCount; return result; @@ -44,6 +49,7 @@ export async function formatChatHistory( return Array.from(new Set(chatHistory)).filter((x): x is string => !!x); } +// These give structure and provide the distinction between the different sections of the chat history function getCorrectHeaderString(prDiff: string | null, isCurrentIssue: boolean, isConvo: boolean) { const strings = { convo: { @@ -90,7 +96,8 @@ async function createContextBlockSection({ tokenLimits: TokenLimits; }): Promise<[number, string]> { let comments = streamlined[key]; - if (!comments || comments.length === 0) { + // just in case we try again but we should already have the comments + if (!comments || !comments.length) { const [owner, repo, number] = splitKey(key); const { comments: fetchedComments } = await fetchIssueComments({ context, @@ -98,7 +105,6 @@ async function createContextBlockSection({ repo, issueNum: parseInt(number), }); - comments = streamlineComments(fetchedComments)[key]; } @@ -108,8 +114,11 @@ async function createContextBlockSection({ throw context.logger.error("Issue number is not valid"); } + // Fetch our diff if we have 
one; this excludes the largest of files to keep within token limits const { diff } = await fetchPullRequestDiff(context, org, repo, issueNumber, tokenLimits); + // specification or pull request body let specOrBody = specAndBodies[key]; + // we should have it already but just in case if (!specOrBody) { specOrBody = ( @@ -122,23 +131,27 @@ async function createContextBlockSection({ )?.body || "No specification or body available"; } - const specHeader = getCorrectHeaderString(diff, isCurrentIssue, false); - const blockHeader = getCorrectHeaderString(diff, isCurrentIssue, true); + const specHeader = getCorrectHeaderString(diff, isCurrentIssue, false); //E.g: === Current Task Specification === + const blockHeader = getCorrectHeaderString(diff, isCurrentIssue, true); //E.g: === Linked Task Conversation === + // contains the actual spec or body const specBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader, key)]; + // contains the conversation const commentSection = createComment({ issueNumber, repo, org, comments }, specOrBody); let block; + // if we have a conversation, we should include it if (commentSection) { block = [specBlock.join("\n"), createHeader(blockHeader, key), commentSection, createFooter(blockHeader, key)]; } else { - // in this scenario we have no task/PR conversation, just the spec + // No need for empty sections in the chat history block = [specBlock.join("\n")]; } // only inject the README if this is the current issue as that's likely most relevant if (isCurrentIssue) { const readme = await pullReadmeFromRepoForIssue({ context, owner: org, repo }); + // give the readme its own clear section if (readme) { const readmeBlock = readme ? [createHeader("README", key), createSpecOrBody(readme), createFooter("README", key)] : []; block = block.concat(readmeBlock); @@ -146,25 +159,15 @@ async function createContextBlockSection({ } if (!diff) { + // the diff was already encoded etc but we have added more to the block so we need to re-encode return [await context.adapters.openai.completions.findTokenLength(block.join("")), block.join("\n")]; } + // Build the block with the diff in its own section const blockWithDiff = [block.join("\n"), createHeader(`Pull Request Diff`, key), diff, createFooter(`Pull Request Diff`, key)]; return [await context.adapters.openai.completions.findTokenLength(blockWithDiff.join("")), blockWithDiff.join("\n")]; } -/** - * Might not need to splice from the formatted window -function removeSections(fullText: string, header: string, footer: string): string { - const regex = new RegExp(`${escapeRegExp(header)}[\\s\\S]*?${escapeRegExp(footer)}`, 'g'); - return fullText.replace(regex, '').trim(); -} - -function escapeRegExp(text: string): string { - return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); -} - */ - function createHeader(content: string, repoString: string) { return `=== ${content} === ${repoString} ===\n`; } @@ -184,6 +187,7 @@ function createComment(comment: StreamlinedComments, specOrBody: string) { const seen = new Set(); comment.comments = comment.comments.filter((c) => { + // Do not include the same comment twice or the spec/body if (seen.has(c.id) || c.body === specOrBody) { return false; } diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 101ddcd..4432067 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -7,28 +7,21 @@ import { dedupeStreamlinedComments, fetchCodeLinkedFromIssue, idIssueFromComment import { handleIssue, handleSpec, 
handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; import { processPullRequestDiff } from "./pull-request-parsing"; -/** - * Recursively fetches linked issues and processes them, including fetching comments and specifications. - * - * @param params - The parameters required to fetch the linked issues, including context and other details. - * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. - */ export async function recursivelyFetchLinkedIssues(params: FetchParams) { + // take a first run at gathering everything we need and package it up const { linkedIssues, seen, specAndBodies, streamlinedComments } = await fetchLinkedIssues(params); + // build promises and throttle them; this calls handleSpec which is a recursive function potentially to great depth const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); await throttlePromises(fetchPromises, 10); + // handle the keys that have been gathered const linkedIssuesKeys = linkedIssues.map((issue) => createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`)); + // exhaustive list of unique keys from the first full pass const specAndBodyKeys = Array.from(new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); + // this fn throttles from within but again, be wary of the rate limit await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); return { linkedIssues, specAndBodies, streamlinedComments }; } -/** - * Fetches linked issues recursively and processes them. - * - * @param params - The parameters required to fetch the linked issues, including context and other details. - * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. - */ export async function fetchLinkedIssues(params: FetchParams) { const { comments, issue } = await fetchIssueComments(params); if (!issue) { @@ -100,15 +93,6 @@ export async function fetchLinkedIssues(params: FetchParams) { return { streamlinedComments, linkedIssues, specAndBodies, seen }; } -/** - * Merges comments and fetches the specification for a linked issue. - * - * @param params - The parameters required to fetch the linked issue, including context and other details. - * @param linkedIssue - The linked issue for which comments and specifications need to be fetched. - * @param streamlinedComments - A record of streamlined comments associated with issues. - * @param specOrBodies - A record of specifications or bodies associated with issues. - * @param seen - A set of issue keys that have already been processed to avoid duplication. - */ export async function mergeCommentsAndFetchSpec( params: FetchParams, linkedIssue: LinkedIssues, @@ -148,11 +132,6 @@ export async function mergeCommentsAndFetchSpec( return await processPullRequestDiff(diff, tokenLimits); } -/** - * Fetches an issue from the GitHub API. - * @param params - Context - * @returns A promise that resolves to an issue object or null if an error occurs. 
- */ export async function fetchIssue(params: FetchParams): Promise { const { octokit, payload, logger } = params.context; const { issueNum, owner, repo } = params; @@ -227,15 +206,6 @@ export async function fetchIssueComments(params: FetchParams) { }; } -/** - * Fetches and handles an issue based on the provided key and parameters. - * - * @param key - The unique key representing the issue in the format "owner/repo/issueNumber". - * @param params - The parameters required to fetch the issue, including context and other details. - * @param streamlinedComments - A record of streamlined comments associated with issues. - * @param seen - A set of issue keys that have already been processed to avoid duplication. - * @returns A promise that resolves to an array of streamlined comments for the specified issue. - */ export async function fetchAndHandleIssue( key: string, params: FetchParams, diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 055f1dd..779cb26 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -4,14 +4,6 @@ import { StreamlinedComment } from "../types/llm"; import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, mergeCommentsAndFetchSpec } from "./issue-fetching"; -/** - * Handles the processing of an issue. - * - * @param params - The parameters required to fetch and handle issues. - * @param streamlinedComments - A record of streamlined comments indexed by keys. - * @param alreadySeen - A set of keys that have already been processed to avoid duplication. - * @returns A promise that resolves when the issue has been handled. - */ export async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen: Set) { if (alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { return; @@ -22,17 +14,6 @@ export async function handleIssue(params: FetchParams, streamlinedComments: Reco return mergeStreamlinedComments(streamlinedComments, streamlined); } -/** - * Handles the processing of a specification or body text. - * - * @param params - The parameters required to fetch and handle issues. - * @param specOrBody - The specification or body text to be processed. - * @param specAndBodies - A record of specifications and bodies indexed by keys. - * @param key - The key associated with the current specification or body. - * @param seen - A set of keys that have already been processed to avoid duplication. - * @param streamlinedComments - A record of streamlined comments indexed by keys. - * @returns A promise that resolves to the updated record of specifications and bodies. - */ export async function handleSpec( params: FetchParams, specOrBody: string, @@ -73,14 +54,6 @@ export async function handleSpec( return specAndBodies; } -/** - * Handles the processing of a comment. - * - * @param params - The parameters required to fetch and handle issues. - * @param comment - The comment to be processed. - * @param streamlinedComments - A record of streamlined comments indexed by keys. - * @param seen - A set of keys that have already been processed to avoid duplication. - */ export async function handleComment( params: FetchParams, comment: StreamlinedComment, @@ -100,15 +73,8 @@ export async function handleComment( } } -/** - * Handles the processing of specification and body keys. - * - * @param keys - An array of keys representing issues or comments to be processed. 
- * @param params - The parameters required to fetch and handle issues. - * @param streamlinedComments - A record of streamlined comments indexed by keys. - * @param seen - A set of keys that have already been processed to avoid duplication. - */ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { + // Make one last sweep just to be sure we have everything const commentProcessingPromises = keys.map(async (key) => { let comments = streamlinedComments[key]; if (!comments || comments.length === 0) { @@ -122,12 +88,6 @@ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, await throttlePromises(commentProcessingPromises, 10); } -/** - * Throttles the execution of promises to ensure that no more than the specified limit are running concurrently. - * - * @param promises - An array of promises to be executed. - * @param limit - The maximum number of promises to run concurrently. - */ export async function throttlePromises(promises: Promise[], limit: number) { const executing: Promise[] = []; for (const promise of promises) { diff --git a/src/helpers/pull-request-parsing.ts b/src/helpers/pull-request-parsing.ts index fc61bbf..efa307b 100644 --- a/src/helpers/pull-request-parsing.ts +++ b/src/helpers/pull-request-parsing.ts @@ -6,12 +6,15 @@ import { EncodeOptions } from "gpt-tokenizer/esm/GptEncoding"; export async function processPullRequestDiff(diff: string, tokenLimits: TokenLimits) { const { runningTokenCount, tokensRemaining } = tokenLimits; + // parse the diff into per-file diffs for quicker processing const perFileDiffs = parsePerFileDiffs(diff); + // filter out obviously non-essential files; .png, .jpg, .pdf, etc. const essentialFileDiffs = perFileDiffs.filter(({ filename }) => { return isEssentialFile(filename); }); + // quick estimate using a simple heuristic; 3.5 characters per token const estimatedFileDiffStats = essentialFileDiffs.map(({ filename, diffContent }) => { const estimatedTokenCount = Math.ceil(diffContent.length / 3.5); return { filename, estimatedTokenCount, diffContent }; @@ -22,6 +25,7 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLim let currentTokenCount = runningTokenCount; const includedFileDiffs = []; + // Using the quick estimate, include as many files as possible without exceeding token limits for (const file of estimatedFileDiffStats) { if (currentTokenCount + file.estimatedTokenCount > tokensRemaining) { logger.info(`Skipping ${file.filename} to stay within token limits.`); @@ -31,11 +35,13 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLim currentTokenCount += file.estimatedTokenCount; } + // If no files can be included, return null if (includedFileDiffs.length === 0) { logger.error(`Cannot include any files from diff without exceeding token limits.`); return { diff: null }; } + // Accurately calculate token count for included files we have approximated to be under the limit const accurateFileDiffStats = await Promise.all( includedFileDiffs.map(async (file) => { const tokenCountArray = await encodeAsync(file.diffContent, { disallowedSpecial: new Set() }); @@ -44,8 +50,10 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLim }) ); + // Take an accurate reading of our current collection of files within the diff currentTokenCount = accurateFileDiffStats.reduce((sum, file) => sum + file.tokenCount, runningTokenCount); + // Remove files from the end of the list until we are 
within token limits while (currentTokenCount > tokensRemaining && accurateFileDiffStats.length > 0) { const removedFile = accurateFileDiffStats.pop(); currentTokenCount -= removedFile?.tokenCount || 0; @@ -57,11 +65,13 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLim return { diff: null }; } + // Build the diff with the included files const currentDiff = accurateFileDiffStats.map((file) => file.diffContent).join("\n"); return { diff: currentDiff }; } +// Helper to speed up tokenization export async function encodeAsync(text: string, options: EncodeOptions): Promise { return new Promise((resolve) => { const result = encode(text, options); @@ -69,22 +79,28 @@ export async function encodeAsync(text: string, options: EncodeOptions): Promise }); } +// Helper to parse a diff into per-file diffs export function parsePerFileDiffs(diff: string): { filename: string; diffContent: string }[] { + // regex to capture diff sections, including the last file const diffPattern = /^diff --git a\/(.*?) b\/.*$/gm; let match: RegExpExecArray | null; const perFileDiffs = []; let lastIndex = 0; + // iterate over each file in the diff while ((match = diffPattern.exec(diff)) !== null) { const filename = match[1]; const startIndex = match.index; + // if we have pushed a file into the array, "append" the diff content if (perFileDiffs.length > 0) { perFileDiffs[perFileDiffs.length - 1].diffContent = diff.substring(lastIndex, startIndex).trim(); } + perFileDiffs.push({ filename, diffContent: "" }); lastIndex = startIndex; } + // append the last file's diff content if (perFileDiffs.length > 0 && lastIndex < diff.length) { perFileDiffs[perFileDiffs.length - 1].diffContent = diff.substring(lastIndex).trim(); } @@ -92,6 +108,7 @@ export function parsePerFileDiffs(diff: string): { filename: string; diffContent return perFileDiffs; } +// This speeds things up considerably by skipping non-readable/non-relevant files function isEssentialFile(filename: string): boolean { const nonEssentialExtensions = [ // Image files @@ -241,4 +258,4 @@ function isEssentialFile(filename: string): boolean { ]; return !nonEssentialExtensions.some((ext) => filename.toLowerCase().endsWith(ext)); -} \ No newline at end of file +} From f4332f98becf326df7b75a49b462dd2be7cdee17 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 22:30:47 +0000 Subject: [PATCH 55/59] chore: remove unused gql fetch --- src/helpers/issue-fetching.ts | 39 +---------------------------------- 1 file changed, 1 insertion(+), 38 deletions(-) diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 4432067..45e049f 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -250,41 +250,4 @@ function castCommentsToSimplifiedComments(comments: (IssueComments | ReviewComme throw logger.error("Comment type not recognized", { comment, params }); }); -} - -export async function fetchLinkedPullRequests(owner: string, repo: string, issueNumber: number, context: Context) { - const query = ` - query($owner: String!, $repo: String!, $issueNumber: Int!) 
{ - repository(owner: $owner, name: $repo) { - issue(number: $issueNumber) { - closedByPullRequestsReferences(first: 100) { - nodes { - number - title - state - merged - url - } - } - } - } - } - `; - - try { - const { repository } = await context.octokit.graphql(query, { - owner, - repo, - issueNumber, - }); - return repository.issue.closedByPullRequestsReferences.nodes; - } catch (error) { - context.logger.error(`Error fetching linked PRs from issue`, { - err: error, - owner, - repo, - issueNumber, - }); - return null; - } -} +} \ No newline at end of file From a6ffb03ff005b47b6ab15c40683a0381dbcc4c41 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 23:50:21 +0000 Subject: [PATCH 56/59] chore: remove exclusion by file ext --- src/helpers/pull-request-parsing.ts | 159 +--------------------------- 1 file changed, 1 insertion(+), 158 deletions(-) diff --git a/src/helpers/pull-request-parsing.ts b/src/helpers/pull-request-parsing.ts index efa307b..87a86e6 100644 --- a/src/helpers/pull-request-parsing.ts +++ b/src/helpers/pull-request-parsing.ts @@ -9,13 +9,8 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLim // parse the diff into per-file diffs for quicker processing const perFileDiffs = parsePerFileDiffs(diff); - // filter out obviously non-essential files; .png, .jpg, .pdf, etc. - const essentialFileDiffs = perFileDiffs.filter(({ filename }) => { - return isEssentialFile(filename); - }); - // quick estimate using a simple heuristic; 3.5 characters per token - const estimatedFileDiffStats = essentialFileDiffs.map(({ filename, diffContent }) => { + const estimatedFileDiffStats = perFileDiffs.map(({ filename, diffContent }) => { const estimatedTokenCount = Math.ceil(diffContent.length / 3.5); return { filename, estimatedTokenCount, diffContent }; }); @@ -107,155 +102,3 @@ export function parsePerFileDiffs(diff: string): { filename: string; diffContent return perFileDiffs; } - -// This speeds things up considerably by skipping non-readable/non-relevant files -function isEssentialFile(filename: string): boolean { - const nonEssentialExtensions = [ - // Image files - ".png", - ".jpg", - ".jpeg", - ".gif", - ".bmp", - ".tiff", - ".svg", - ".ico", - ".psd", - ".ai", - ".eps", - - // Video files - ".mp4", - ".avi", - ".mov", - ".wmv", - ".flv", - ".mkv", - ".webm", - ".mpeg", - ".mpg", - ".m4v", - - // Audio files - ".mp3", - ".wav", - ".flac", - ".aac", - ".ogg", - ".wma", - ".m4a", - ".aiff", - ".ape", - - // Document files - ".pdf", - ".doc", - ".docx", - ".xls", - ".xlsx", - ".ppt", - ".pptx", - ".odt", - ".ods", - ".odp", - - // Archive files - ".zip", - ".rar", - ".7z", - ".tar", - ".gz", - ".bz2", - ".xz", - ".lz", - ".z", - - // Executable and binary files - ".exe", - ".dll", - ".so", - ".dylib", - ".bin", - ".class", - ".jar", - ".war", - ".ear", - ".msi", - ".apk", - ".ipa", - - // Compiled object files - ".o", - ".obj", - ".pyc", - ".pyo", - ".pyd", - ".lib", - ".a", - ".dSYM", - - // System and temporary files - ".sys", - ".tmp", - ".bak", - ".old", - ".swp", - ".swo", - ".lock", - ".cfg", - ".ini", - - // Database files - ".db", - ".sqlite", - ".sqlite3", - ".mdb", - ".accdb", - ".dbf", - ".frm", - ".myd", - ".myi", - - // Font files - ".ttf", - ".otf", - ".woff", - ".woff2", - ".eot", - - // Backup and miscellaneous files - ".log", - ".bak", - ".orig", - ".sav", - ".save", - ".dump", - - // Other non-essential files - ".crt", - ".pem", - ".key", - ".csr", - ".der", // Certificate files - 
".plist", - ".mobileprovision", // iOS specific files - ".icns", // macOS icon files - ".ds_store", - "thumbs.db", - "desktop.ini", // System files - - // Generated files - ".map", - ".min.js", - ".d.ts", - ".map.js", - ".map.css", - ".bundle.js", - ".bundle.css", - ".bundle.js.map", - ".bundle.css.map", - ".bundle.min.js", - ]; - - return !nonEssentialExtensions.some((ext) => filename.toLowerCase().endsWith(ext)); -} From a5221c79e376b68085a706a85b8abe3c50756eb9 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 31 Oct 2024 23:58:59 +0000 Subject: [PATCH 57/59] chore: type null, filter null --- src/helpers/format-chat-history.ts | 2 +- src/helpers/issue-fetching.ts | 15 +++++++++------ src/types/github-types.ts | 16 +++------------- 3 files changed, 13 insertions(+), 20 deletions(-) diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index dc38288..bad5d7f 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -182,7 +182,7 @@ function createSpecOrBody(specOrBody: string) { function createComment(comment: StreamlinedComments, specOrBody: string) { if (!comment.comments) { - return ""; + return null; } const seen = new Set(); diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 45e049f..3c8d73a 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -1,6 +1,6 @@ import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; -import { IssueComments, FetchParams, Issue, LinkedIssues, LinkedPullsToIssue, ReviewComments, SimplifiedComment } from "../types/github-types"; +import { IssueComments, FetchParams, Issue, LinkedIssues, ReviewComments, SimplifiedComment } from "../types/github-types"; import { StreamlinedComment, TokenLimits } from "../types/llm"; import { logger } from "./errors"; import { dedupeStreamlinedComments, fetchCodeLinkedFromIssue, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; @@ -23,8 +23,8 @@ export async function recursivelyFetchLinkedIssues(params: FetchParams) { } export async function fetchLinkedIssues(params: FetchParams) { - const { comments, issue } = await fetchIssueComments(params); - if (!issue) { + const fetchedIssueAndComments = await fetchIssueComments(params); + if (!fetchedIssueAndComments.issue) { return { streamlinedComments: {}, linkedIssues: [], specAndBodies: {}, seen: new Set() }; } @@ -32,14 +32,17 @@ export async function fetchLinkedIssues(params: FetchParams) { throw logger.error("Owner or repo not found"); } + const issue = fetchedIssueAndComments.issue; + const comments = fetchedIssueAndComments.comments.filter((comment) => comment.body !== undefined); + const issueKey = createKey(issue.html_url); const [owner, repo, issueNumber] = splitKey(issueKey); - const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; + const linkedIssues: LinkedIssues[] = [{ body: issue.body, comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; const specAndBodies: Record = {}; const seen = new Set([issueKey]); comments.push({ - body: issue.body || "", + body: issue.body, user: issue.user, id: issue.id.toString(), org: params.owner, @@ -250,4 +253,4 @@ function castCommentsToSimplifiedComments(comments: (IssueComments | ReviewComme throw logger.error("Comment type not recognized", { comment, params }); }); -} \ No 
newline at end of file +} diff --git a/src/types/github-types.ts b/src/types/github-types.ts index 2830da7..590f5d6 100644 --- a/src/types/github-types.ts +++ b/src/types/github-types.ts @@ -19,12 +19,12 @@ export type LinkedIssues = { owner: string; url: string; comments?: SimplifiedComment[] | null | undefined; - body: string | undefined; + body: string | undefined | null; }; export type SimplifiedComment = { user: Partial | null; - body: string | undefined; + body: string | undefined | null; id: string; org: string; repo: string; @@ -47,14 +47,4 @@ export type FetchedPulls = { state: string; merged: boolean; url: string; -}; - -export type LinkedPullsToIssue = { - repository: { - issue: { - closedByPullRequestsReferences: { - nodes: FetchedPulls[]; - }; - }; - }; -}; +}; \ No newline at end of file From 27184779acc1946363c61c80d60d81423a3e522a Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 1 Nov 2024 00:00:58 +0000 Subject: [PATCH 58/59] chore: remove unused type --- src/types/github-types.ts | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/types/github-types.ts b/src/types/github-types.ts index 590f5d6..af351f3 100644 --- a/src/types/github-types.ts +++ b/src/types/github-types.ts @@ -39,12 +39,4 @@ export type FetchedCodes = { org: string; repo: string; issueNumber: number; -}; - -export type FetchedPulls = { - number: number; - title: string; - state: string; - merged: boolean; - url: string; }; \ No newline at end of file From 91049454595c56772dbb8f31cc1451237607dffc Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sun, 3 Nov 2024 23:17:18 +0000 Subject: [PATCH 59/59] chore: push old and merge missing ctx fixes --- .cspell.json | 27 +-- package.json | 2 +- .../helpers/append-to-base-chat-history.ts | 4 +- src/adapters/openai/helpers/completions.ts | 87 +++------- .../openai/helpers/create-system-msg.ts | 14 +- src/adapters/openai/helpers/prompts.ts | 2 +- src/adapters/openai/types.ts | 6 +- src/handlers/ask-llm.ts | 159 ++++++++++-------- src/handlers/comment-created-callback.ts | 4 +- src/handlers/llm-query-output.ts | 6 +- src/types/github-types.ts | 2 +- src/types/llm.ts | 8 +- tests/main.test.ts | 4 +- 13 files changed, 138 insertions(+), 187 deletions(-) diff --git a/.cspell.json b/.cspell.json index ed9a1f5..ff444a7 100644 --- a/.cspell.json +++ b/.cspell.json @@ -1,14 +1,7 @@ { "$schema": "https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json", "version": "0.2", - "ignorePaths": [ - "**/*.json", - "**/*.css", - "node_modules", - "**/*.log", - "./src/adapters/supabase/**/**.ts", - "/supabase/*" - ], + "ignorePaths": ["**/*.json", "**/*.css", "node_modules", "**/*.log", "./src/adapters/supabase/**/**.ts", "/supabase/*"], "useGitignore": true, "language": "en", "words": [ @@ -48,17 +41,7 @@ "mobileprovision", "icns" ], - "dictionaries": [ - "typescript", - "node", - "software-terms" - ], - "import": [ - "@cspell/dict-typescript/cspell-ext.json", - "@cspell/dict-node/cspell-ext.json", - "@cspell/dict-software-terms" - ], - "ignoreRegExpList": [ - "[0-9a-fA-F]{6}" - ] -} \ No newline at end of file + "dictionaries": ["typescript", "node", "software-terms"], + "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], + "ignoreRegExpList": ["[0-9a-fA-F]{6}"] +} diff --git a/package.json b/package.json index 66487f1..34a965c 100644 --- a/package.json +++ b/package.json @@ -87,4 +87,4 @@ 
"@commitlint/config-conventional" ] } -} \ No newline at end of file +} diff --git a/src/adapters/openai/helpers/append-to-base-chat-history.ts b/src/adapters/openai/helpers/append-to-base-chat-history.ts index 472bf39..ca59cd6 100644 --- a/src/adapters/openai/helpers/append-to-base-chat-history.ts +++ b/src/adapters/openai/helpers/append-to-base-chat-history.ts @@ -2,7 +2,7 @@ import { createSystemMessage } from "./create-system-msg"; import { ChatHistory, CreationParams, ToolCallResponse } from "../types"; export function appendToConversation(params: CreationParams, toolCallsToAppend: ToolCallResponse[] = []): ChatHistory { - const { systemMessage, prompt, additionalContext, localContext, groundTruths, botName } = params; + const { systemMessage, query, additionalContext, localContext, groundTruths, botName } = params; const baseChat: ChatHistory = [ { role: "system", @@ -18,7 +18,7 @@ export function appendToConversation(params: CreationParams, toolCallsToAppend: content: [ { type: "text", - text: prompt, + text: query, }, ], }, diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index bf991d8..bd7c980 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -5,20 +5,9 @@ import { logger } from "../../../helpers/errors"; import { appendToConversation } from "./append-to-base-chat-history"; import { getAnswerAndTokenUsage } from "./get-answer-and-token-usage"; import { CreationParams, ResponseFromLlm, ToolCallResponse } from "../types"; -import { MAX_COMPLETION_TOKENS } from "../constants"; import { CompletionsModelHelper, ModelApplications } from "../../../types/llm"; import { encode } from "gpt-tokenizer"; -export interface CompletionsType { - answer: string; - groundTruths: string[]; - tokenUsage: { - input: number; - output: number; - total: number; - }; -} - export class Completions extends SuperOpenAi { protected context: Context; @@ -63,73 +52,38 @@ export class Completions extends SuperOpenAi { return this.getModelMaxTokenLimit("o1-mini"); } - async createCompletion( - { - query, - model, + params: { + systemMessage: string; + query: string; + model: string; + additionalContext: string[]; + localContext: string[]; + groundTruths: string[]; + botName: string; + maxTokens: number; + }, + chatHistory?: OpenAI.Chat.Completions.ChatCompletionMessageParam[] + ): Promise { + const { query, model, additionalContext, localContext, groundTruths, botName, maxTokens } = params; + logger.info(`Creating completion for model: ${model} with query: ${query}`); + logger.info(`Context for completion:`, { additionalContext, localContext, groundTruths, botName, - maxTokens, - }: { - query: string, - model: string, - additionalContext: string[], - localContext: string[], - groundTruths: string[], - botName: string, - maxTokens: number - } - ): Promise { - const numTokens = await this.findTokenLength(query, additionalContext, localContext, groundTruths); - logger.info(`Number of tokens: ${numTokens}`); - - const sysMsg = [ - "You Must obey the following ground truths: ", - JSON.stringify(groundTruths) + "\n", - "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. 
**Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.", - `Your name is: ${botName}`, - "\n", - "Main Context (Provide additional precedence in terms of information): ", - localContext.join("\n"), - "Secondary Context: ", - additionalContext.join("\n"), - ].join("\n"); - - logger.info(`System message: ${sysMsg}`); - logger.info(`Query: ${query}`); + }); const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ // tools: LLM_TOOLS, might not be a good idea to have this available for the general chatbot model: model, - messages: [ - { - role: "system", - content: [ - { - type: "text", - text: sysMsg, - }, - ], - }, - { - role: "user", - content: [ - { - type: "text", - text: query, - }, - ], - }, - ], + messages: chatHistory || appendToConversation(params), temperature: 0.2, // This value is now deprecated in favor of max_completion_tokens, and is not compatible with o1 series models. // max_COMPLETION_tokens: MAX_COMPLETION_TOKENS, /**An upper bound for the number of tokens that can be generated for a completion, including visible output tokens and reasoning tokens. 
*/ - max_completion_tokens: MAX_COMPLETION_TOKENS, + max_completion_tokens: maxTokens, top_p: 0.5, frequency_penalty: 0, presence_penalty: 0, @@ -144,7 +98,7 @@ export class Completions extends SuperOpenAi { } async handleFunctionCalling(res: OpenAI.Chat.Completions.ChatCompletion, params: CreationParams) { - const { systemMessage, prompt, model, additionalContext, localContext, groundTruths, botName } = params; + const { systemMessage, query, model, additionalContext, localContext, groundTruths, botName, maxTokens } = params; if (res.choices[0].finish_reason === "function_call") { const toolCalls = res.choices[0].message.tool_calls; const choiceMessage = res.choices[0]["message"]; @@ -218,12 +172,13 @@ export class Completions extends SuperOpenAi { return await this.createCompletion( { systemMessage, - prompt, + query, model, additionalContext, localContext, groundTruths, botName, + maxTokens, }, newChat ); diff --git a/src/adapters/openai/helpers/create-system-msg.ts b/src/adapters/openai/helpers/create-system-msg.ts index 94ecfca..532f6a0 100644 --- a/src/adapters/openai/helpers/create-system-msg.ts +++ b/src/adapters/openai/helpers/create-system-msg.ts @@ -1,17 +1,13 @@ export function createSystemMessage(systemMessage: string, additionalContext: string[], localContext: string[], groundTruths: string[], botName: string) { // safer to use array join than string concatenation const parts = [ - "You Must obey the following ground truths: [", - groundTruths.join(":"), - "]\n", + `You Must obey the following ground truths: ${JSON.stringify(groundTruths)}\n`, systemMessage, - "Your name is : ", - botName, - "\n", - "Primary Context: ", - additionalContext.join("\n"), - "\nLocal Context: ", + `Your name is: ${botName}`, + "Main Context (Provide additional precedence in terms of information): ", localContext.join("\n"), + "Secondary Context: ", + additionalContext.join("\n"), ]; return parts.join("\n"); diff --git a/src/adapters/openai/helpers/prompts.ts b/src/adapters/openai/helpers/prompts.ts index 3110adc..c1ceed3 100644 --- a/src/adapters/openai/helpers/prompts.ts +++ b/src/adapters/openai/helpers/prompts.ts @@ -1,4 +1,4 @@ -export const DEFAULT_SYSTEM_MESSAGE = `You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions. +export const CHATBOT_DEFAULT_SYSTEM_MESSAGE = `You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions. 
# Steps diff --git a/src/adapters/openai/types.ts b/src/adapters/openai/types.ts index 1f0497d..7d6fa6f 100644 --- a/src/adapters/openai/types.ts +++ b/src/adapters/openai/types.ts @@ -6,22 +6,24 @@ export type TokenUsage = { input: number; output: number; total: number; - outputDetails?: OpenAI.Completions.CompletionUsage.CompletionTokensDetails; + reasoning_tokens?: number; }; export type ResponseFromLlm = { answer: string; + groundTruths: string[]; tokenUsage: TokenUsage; }; export type CreationParams = { systemMessage: string; - prompt: string; + query: string; model: string; additionalContext: string[]; localContext: string[]; groundTruths: string[]; botName: string; + maxTokens: number; }; export type ToolCallResponse = { diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index a9869b3..86c8e9d 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -1,96 +1,113 @@ import { Context } from "../types"; -import { ResponseFromLlm } from "../adapters/openai/helpers/completions"; import { CommentSimilaritySearchResult } from "../adapters/supabase/helpers/comment"; import { IssueSimilaritySearchResult } from "../adapters/supabase/helpers/issues"; import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; import { formatChatHistory } from "../helpers/format-chat-history"; -import { optimizeContext } from "../helpers/issue"; -import { DEFAULT_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; import { fetchRepoDependencies, fetchRepoLanguageStats } from "./ground-truths/chat-bot"; import { findGroundTruths } from "./ground-truths/find-ground-truths"; import { bubbleUpErrorComment, logger } from "../helpers/errors"; +import { ResponseFromLlm } from "../adapters/openai/types"; +import { CHATBOT_DEFAULT_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; -/** - * Asks a question to GPT and returns the response - * @param context - The context object containing environment and configuration details - * @param question - The question to ask GPT - * @returns The response from GPT - * @throws If no question is provided - */ export async function askQuestion(context: Context<"issue_comment.created">, question: string) { + if (!question) { + throw logger.error("No question provided"); + } + // using any links in comments or issue/pr bodies to fetch more context + const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ + context, + owner: context.payload.repository.owner.login, + repo: context.payload.repository.name, + issueNum: context.payload.issue.number, + }); + // build a nicely structure system message containing a streamlined chat history + // includes the current issue, any linked issues, and any linked PRs + const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); + logger.info(`${formattedChat.join("")}`); + return await askLlm(context, question, formattedChat); +} +export async function askLlm(context: Context, question: string, formattedChat: string[]): Promise { + const { + env: { UBIQUITY_OS_APP_NAME }, + config: { model, similarityThreshold, maxTokens }, + adapters: { + supabase: { comment, issue }, + voyage: { reranker }, + openai: { completions }, + }, + } = context; - export async function askQuestion(context: Context, question: string) { - if (!question) { - throw logger.error("No question provided"); - } - // using any links in comments or issue/pr bodies to fetch more context - const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ - 
context, - owner: context.payload.repository.owner.login, - repo: context.payload.repository.name, - issueNum: context.payload.issue.number, - }); - // build a nicely structure system message containing a streamlined chat history - // includes the current issue, any linked issues, and any linked PRs - const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - logger.info(`${formattedChat.join("")}`); - return await askLlm(context, question, formattedChat); - } + try { + // using db functions to find similar comments and issues + const [similarComments, similarIssues] = await Promise.all([ + comment.findSimilarComments(question, 1 - similarityThreshold, ""), + issue.findSimilarIssues(question, 1 - similarityThreshold, ""), + ]); - export async function askLlm(context: Context, question: string, formattedChat: string[]): Promise { - const { - env: { UBIQUITY_OS_APP_NAME }, - config: { model, similarityThreshold, maxTokens }, - adapters: { - supabase: { comment, issue }, - voyage: { reranker }, - openai: { completions }, - }, - } = context; + // combine the similar comments and issues into a single array + const similarText = [ + ...(similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || []), + ...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []), + ]; - try { - // using db functions to find similar comments and issues - const [similarComments, similarIssues] = await Promise.all([ - comment.findSimilarComments(question, 1 - similarityThreshold, ""), - issue.findSimilarIssues(question, 1 - similarityThreshold, ""), - ]); + // filter out any empty strings + formattedChat = formattedChat.filter((text) => text); - // combine the similar comments and issues into a single array - const similarText = [ - ...(similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || []), - ...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []), - ]; + logger.info(`Found similar texts: pre-rerank`, { + similarComments, + similarIssues, + }); - // filter out any empty strings - formattedChat = formattedChat.filter((text) => text); + // rerank the similar text using voyageai + const rerankedText = similarText.length > 0 ? await reranker.reRankResults(similarText, question) : []; - // rerank the similar text using voyageai - const rerankedText = similarText.length > 0 ? 
await reranker.reRankResults(similarText, question) : []; - // gather structural data about the payload repository - const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); + logger.info(`Found similar texts: post-rerank`, { + rerankedText, + }); - let groundTruths: string[] = []; + // gather structural data about the payload repository + const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); - if (!languages.length) { - groundTruths.push("No languages found in the repository"); - } + let groundTruths: string[] = []; - if (!Reflect.ownKeys(dependencies).length) { - groundTruths.push("No dependencies found in the repository"); - } + if (!languages.length) { + groundTruths.push("No languages found in the repository"); + } - if (!Reflect.ownKeys(devDependencies).length) { - groundTruths.push("No devDependencies found in the repository"); - } + if (!Reflect.ownKeys(dependencies).length) { + groundTruths.push("No dependencies found in the repository"); + } - if (groundTruths.length === 3) { - return await completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); - } + if (!Reflect.ownKeys(devDependencies).length) { + groundTruths.push("No devDependencies found in the repository"); + } - groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); - return await completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); - } catch (error) { - throw bubbleUpErrorComment(context, error, false); + if (groundTruths.length === 3) { + return await completions.createCompletion({ + systemMessage: CHATBOT_DEFAULT_SYSTEM_MESSAGE, + query: question, + model, + additionalContext: rerankedText, + localContext: formattedChat, + groundTruths, + botName: UBIQUITY_OS_APP_NAME, + maxTokens, + }); } + + groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); + return await completions.createCompletion({ + systemMessage: CHATBOT_DEFAULT_SYSTEM_MESSAGE, + query: question, + model, + additionalContext: rerankedText, + localContext: formattedChat, + groundTruths, + botName: UBIQUITY_OS_APP_NAME, + maxTokens, + }); + } catch (error) { + throw bubbleUpErrorComment(context, error, false); } +} diff --git a/src/handlers/comment-created-callback.ts b/src/handlers/comment-created-callback.ts index 9a9d641..10784b2 100644 --- a/src/handlers/comment-created-callback.ts +++ b/src/handlers/comment-created-callback.ts @@ -31,11 +31,9 @@ export async function issueCommentCreatedCallback( } logger.info(`Asking question: ${question}`); return await handleLlmQueryOutput(context, await askQuestion(context, question)); - - } -function createStructuredMetadata(header: string | undefined, logReturn: LogReturn) { +export function createStructuredMetadata(header: string | undefined, logReturn: LogReturn) { let logMessage, metadata; if (logReturn) { logMessage = logReturn.logMessage; diff --git a/src/handlers/llm-query-output.ts b/src/handlers/llm-query-output.ts index 2ecafd1..bda763b 100644 --- a/src/handlers/llm-query-output.ts +++ b/src/handlers/llm-query-output.ts @@ -1,8 +1,9 @@ -import { ResponseFromLlm } from "../adapters/openai/helpers/completions"; +import { ResponseFromLlm } from "../adapters/openai/types"; import { bubbleUpErrorComment } 
from "../helpers/errors"; import { Context } from "../types"; import { CallbackResult } from "../types/proxy"; import { addCommentToIssue } from "./add-comment"; +import { createStructuredMetadata } from "./comment-created-callback"; export async function handleLlmQueryOutput(context: Context, llmResponse: ResponseFromLlm): Promise { const { logger } = context; @@ -12,7 +13,6 @@ export async function handleLlmQueryOutput(context: Context, llmResponse: Respon throw logger.error(`No answer from OpenAI`); } logger.info(`Answer: ${answer}`, { tokenUsage }); - const tokens = `\n\n`; const metadataString = createStructuredMetadata( // don't change this header, it's used for tracking @@ -30,4 +30,4 @@ export async function handleLlmQueryOutput(context: Context, llmResponse: Respon } catch (error) { throw await bubbleUpErrorComment(context, error, false); } -} \ No newline at end of file +} diff --git a/src/types/github-types.ts b/src/types/github-types.ts index af351f3..b5692de 100644 --- a/src/types/github-types.ts +++ b/src/types/github-types.ts @@ -39,4 +39,4 @@ export type FetchedCodes = { org: string; repo: string; issueNumber: number; -}; \ No newline at end of file +}; diff --git a/src/types/llm.ts b/src/types/llm.ts index c92035b..7d5bedf 100644 --- a/src/types/llm.ts +++ b/src/types/llm.ts @@ -15,16 +15,16 @@ type CodeReviewAppParams = { export type AppParamsHelper = TApp extends "code-review" ? CodeReviewAppParams : TApp extends "chat-bot" - ? ChatBotAppParams - : never; + ? ChatBotAppParams + : never; export type CompletionsModelHelper = TApp extends "code-review" ? "gpt-4o" : TApp extends "chat-bot" ? "o1-mini" : never; export type GroundTruthsSystemMessage = TApp extends "code-review" ? (typeof GROUND_TRUTHS_SYSTEM_MESSAGES)["code-review"] : TApp extends "chat-bot" - ? (typeof GROUND_TRUTHS_SYSTEM_MESSAGES)["chat-bot"] - : never; + ? (typeof GROUND_TRUTHS_SYSTEM_MESSAGES)["chat-bot"] + : never; export type GroundTruthsSystemMessageTemplate = { truthRules: string[]; diff --git a/tests/main.test.ts b/tests/main.test.ts index 0c888dd..701b5b0 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -11,7 +11,7 @@ import { runPlugin } from "../src/plugin"; import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; import { envSchema } from "../src/types/env"; import { ResponseFromLlm } from "../src/adapters/openai/helpers/completions"; -import { CompletionsType } from "../src/adapters/openai/helpers/completions"; +import { ResponseFromLlm } from "../src/adapters/openai/helpers/completions"; import { logger } from "../src/helpers/errors"; const TEST_QUESTION = "what is pi?"; @@ -412,7 +412,7 @@ function createContext(body = TEST_SLASH_COMMAND) { getModelMaxOutputLimit: () => { return 50000; }, - createCompletion: async (): Promise => { + createCompletion: async (): Promise => { return { answer: MOCK_ANSWER, groundTruths: [MOCK_ANSWER],