diff --git a/.cspell.json b/.cspell.json index 65d0f95..ff444a7 100644 --- a/.cspell.json +++ b/.cspell.json @@ -31,10 +31,15 @@ "nemo", "Reranking", "mistralai", + "Precheck", "Typeguard", "typeguards", "OPENROUTER_API_KEY", - "Openrouter" + "Openrouter", + "flac", + "dylib", + "mobileprovision", + "icns" ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], diff --git a/.gitignore b/.gitignore index e23b105..c765e4d 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,5 @@ junit.xml cypress/screenshots script.ts .wrangler -test-dashboard.md \ No newline at end of file +test-dashboard.md +payloads.json diff --git a/manifest.json b/manifest.json index 5d6ce58..5c92a0c 100644 --- a/manifest.json +++ b/manifest.json @@ -1,5 +1,5 @@ { "name": "command-ask", "description": "A highly context aware organization integrated chatbot", - "ubiquity:listeners": ["issue_comment.created"] + "ubiquity:listeners": ["issue_comment.created", "pull_request.opened", "pull_request.ready_for_review"] } diff --git a/package.json b/package.json index ece6959..34a965c 100644 --- a/package.json +++ b/package.json @@ -28,13 +28,14 @@ ], "dependencies": { "@mswjs/data": "^0.16.2", + "@octokit/graphql-schema": "^15.25.0", "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", "@supabase/supabase-js": "^2.45.4", + "@ubiquity-os/ubiquity-os-kernel": "^2.4.0", "@ubiquity-os/ubiquity-os-logger": "^1.3.2", "dotenv": "^16.4.5", - "github-diff-tool": "^1.0.6", "gpt-tokenizer": "^2.5.1", "openai": "^4.63.0", "typebox-validators": "0.3.5", diff --git a/src/adapters/openai/constants.ts b/src/adapters/openai/constants.ts new file mode 100644 index 0000000..9016b50 --- /dev/null +++ b/src/adapters/openai/constants.ts @@ -0,0 +1,3 @@ +// this should probably be passed in via the config + +export const MAX_COMPLETION_TOKENS = 
7000; diff --git a/src/adapters/openai/helpers/append-to-base-chat-history.ts b/src/adapters/openai/helpers/append-to-base-chat-history.ts new file mode 100644 index 0000000..ca59cd6 --- /dev/null +++ b/src/adapters/openai/helpers/append-to-base-chat-history.ts @@ -0,0 +1,35 @@ +import { createSystemMessage } from "./create-system-msg"; +import { ChatHistory, CreationParams, ToolCallResponse } from "../types"; + +export function appendToConversation(params: CreationParams, toolCallsToAppend: ToolCallResponse[] = []): ChatHistory { + const { systemMessage, query, additionalContext, localContext, groundTruths, botName } = params; + const baseChat: ChatHistory = [ + { + role: "system", + content: [ + { + type: "text", + text: createSystemMessage(systemMessage, additionalContext, localContext, groundTruths, botName), + }, + ], + }, + { + role: "user", + content: [ + { + type: "text", + text: query, + }, + ], + }, + ]; + + if (toolCallsToAppend.length > 0) { + toolCallsToAppend.forEach((toolCallResponse) => { + baseChat.push(toolCallResponse.response); + baseChat.push(toolCallResponse.tool_call_response); + }); + } + + return baseChat; +} diff --git a/src/adapters/openai/helpers/call-handler.ts b/src/adapters/openai/helpers/call-handler.ts new file mode 100644 index 0000000..857ac54 --- /dev/null +++ b/src/adapters/openai/helpers/call-handler.ts @@ -0,0 +1,171 @@ +import OpenAI from "openai"; +import { LLM_FUNCTIONS, LLM_TOOLS } from "./llm-tools"; +import { Context } from "../../../types"; +import { getIssueNumberFromPayload } from "../../../helpers/get-issue-no-from-payload"; +import { logger } from "../../../helpers/errors"; +import { ChatHistory, ResponseFromLlm } from "../types"; +import { getAnswerAndTokenUsage } from "./get-answer-and-token-usage"; + +export async function handleChat(context: Context, chatHistory: ChatHistory) { + const response = await singleResponse(context, chatHistory); + return await handleResponse(context, response, chatHistory); +} + 
+async function singleResponse(context: Context, chatHistory: ChatHistory) { + const { + config: { model }, + env: { OPENAI_API_KEY }, + } = context; + const openAi = new OpenAI({ + apiKey: OPENAI_API_KEY, + }); + + return await openAi.chat.completions.create({ + messages: chatHistory, + model, + max_completion_tokens: 7000, + temperature: 0, + tools: LLM_TOOLS, + tool_choice: "auto", + }); +} + +async function handleResponse( + context: Context, + response: OpenAI.Chat.Completions.ChatCompletion, + chatHistory: ChatHistory +): Promise { + let chainCount = 0; + let toolIndex = 0; + let funcName = response.choices[0].message.tool_calls?.[0].function?.name; + let funcParams = response.choices[0].message.tool_calls?.[0].function?.arguments; + const toolCalls = response.choices[0].message.tool_calls?.length; + + const answerAndUsage = getAnswerAndTokenUsage(response); + + if (!toolCalls) { + return { + ...answerAndUsage, + chatHistory, + }; + } + + while (toolCalls > 0) { + chainCount++; + console.log(`Chain count: ${chainCount}`); + console.log(`Response ${chainCount}: ${response.choices[0].message.content}`); + const toolCallFn = agentCommands.find((command) => command.name === funcName); + + let argObj: Record; + if (funcParams) { + argObj = JSON.parse(funcParams); + } else { + argObj = {}; + } + + try { + if (toolCallFn && toolCallFn.func) { + const issueNumber = getIssueNumberFromPayload(context.payload); + const args = toolCallFn?.expectedArgs.map((arg: string) => argObj[arg]) || []; + const result = await toolCallFn?.func(...args, { + owner: context.payload.repository.owner.login, + repo: context.payload.repository.name, + octokit: context.octokit, + pull_number: issueNumber, + }); + + chatHistory.push({ + role: "tool", + content: result, + tool_call_id: response.choices[0].message.tool_calls?.[toolIndex]?.id || "", + }); + } + } catch (err) { + console.log("===================================="); + console.log("err:", err); + 
console.log("===================================="); + } + toolIndex++; + + if (!response.choices[0].message.tool_calls?.[toolIndex]) { + break; + } + + funcName = response.choices[0].message.tool_calls?.[toolIndex]?.function.name; + funcParams = response.choices[0].message.tool_calls?.[toolIndex]?.function.arguments; + } + + response = await singleResponse(context, chatHistory); + + const lastResponse = getAnswerAndTokenUsage(response); + + if (!lastResponse.answer) { + throw logger.error("No response found in handleResponse", { + response, + chatHistory, + chainCount, + toolCalls, + toolIndex, + }); + } + const { + tokenUsage: { outputDetails: lastOutputDetails }, + } = lastResponse; + const { + tokenUsage: { outputDetails: firstOutputDetails }, + } = answerAndUsage; + + let totalReasoningTokens = 0; + + if (lastOutputDetails && lastOutputDetails.reasoning_tokens) { + totalReasoningTokens += lastOutputDetails.reasoning_tokens; + } + + if (firstOutputDetails && firstOutputDetails.reasoning_tokens) { + totalReasoningTokens += firstOutputDetails.reasoning_tokens; + } + + return { + answer: lastResponse.answer, + chatHistory, + tokenUsage: { + input: answerAndUsage.tokenUsage.input + lastResponse.tokenUsage.input, + output: answerAndUsage.tokenUsage.output + lastResponse.tokenUsage.output, + total: answerAndUsage.tokenUsage.total + lastResponse.tokenUsage.total, + outputDetails: { + reasoning_tokens: totalReasoningTokens, + }, + }, + }; +} + +function isValidTool(name: string) { + return LLM_TOOLS.some((tool) => tool.function.name === `${name}Tool`); +} + +type AgentCommand = { + name: string; + // eslint-disable-next-line @typescript-eslint/ban-types + func: Function; + expectedArgs: string[]; +}; + +/** + * Handles function calling/response chaining for our models. 
+ */ +const agentCommands: AgentCommand[] = LLM_TOOLS.map((tool) => { + // tools should be named like: fnNameTool > fnName (convertPullToDraftTool > convertPullToDraft) + // where fnNameTool is the api consumed by the LLM and fnName is the actual function + const fnName = tool.function.name.replace("Tool", ""); + + if (!isValidTool(fnName)) { + throw new Error(`Invalid tool called: ${fnName}`); + } + + return { + name: tool.function.name, + // eslint-disable-next-line @typescript-eslint/ban-types + func: LLM_FUNCTIONS.find((fn) => fn.name === fnName) as Function, + expectedArgs: JSON.parse(JSON.stringify(tool.function.parameters?.required)) as string[], + }; +}); diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 3a5f24a..bd7c980 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -1,19 +1,13 @@ import OpenAI from "openai"; import { Context } from "../../../types"; import { SuperOpenAi } from "./openai"; +import { logger } from "../../../helpers/errors"; +import { appendToConversation } from "./append-to-base-chat-history"; +import { getAnswerAndTokenUsage } from "./get-answer-and-token-usage"; +import { CreationParams, ResponseFromLlm, ToolCallResponse } from "../types"; import { CompletionsModelHelper, ModelApplications } from "../../../types/llm"; import { encode } from "gpt-tokenizer"; -export interface CompletionsType { - answer: string; - groundTruths: string[]; - tokenUsage: { - input: number; - output: number; - total: number; - }; -} - export class Completions extends SuperOpenAi { protected context: Context; @@ -22,50 +16,74 @@ export class Completions extends SuperOpenAi { this.context = context; } + getModelMaxTokenLimit(model: string): number { + // could be made more robust, unfortunately, there's no endpoint to get the model token limit + const tokenLimits = new Map([ + ["o1-mini", 128_000], + ["o1-preview", 128_000], + ["gpt-4-turbo", 
128_000], + ["gpt-4o", 128_000], + ["gpt-4o-mini", 128_000], + ["gpt-4", 8_192], + ["gpt-3.5-turbo-0125", 16_385], + ["gpt-3.5-turbo", 16_385], + ]); + + return tokenLimits.get(model) || 128_000; + } + + getModelMaxOutputLimit(model: string): number { + // could be made more robust, unfortunately, there's no endpoint to get the model token limit + const tokenLimits = new Map([ + ["o1-mini", 65_536], + ["o1-preview", 32_768], + ["gpt-4-turbo", 4_096], + ["gpt-4o-mini", 16_384], + ["gpt-4o", 16_384], + ["gpt-4", 8_192], + ["gpt-3.5-turbo-0125", 4_096], + ["gpt-3.5-turbo", 4_096], + ]); + + return tokenLimits.get(model) || 16_384; + } + + async getModelTokenLimit(): Promise { + return this.getModelMaxTokenLimit("o1-mini"); + } + async createCompletion( - prompt: string, - model: string = "o1-mini", - additionalContext: string[], - localContext: string[], - groundTruths: string[], - botName: string, - maxTokens: number - ): Promise { + params: { + systemMessage: string; + query: string; + model: string; + additionalContext: string[]; + localContext: string[]; + groundTruths: string[]; + botName: string; + maxTokens: number; + }, + chatHistory?: OpenAI.Chat.Completions.ChatCompletionMessageParam[] + ): Promise { + const { query, model, additionalContext, localContext, groundTruths, botName, maxTokens } = params; + logger.info(`Creating completion for model: ${model} with query: ${query}`); + logger.info(`Context for completion:`, { + additionalContext, + localContext, + groundTruths, + botName, + }); + const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ + // tools: LLM_TOOLS, might not be a good idea to have this available for the general chatbot model: model, - messages: [ - { - role: "system", - content: [ - { - type: "text", - text: - "You Must obey the following ground truths: [" + - groundTruths.join(":") + - "]\n" + - "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history 
and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." 
+ - "Your name is : " + - botName + - "\n" + - "Main Context (Provide additional precedence in terms of information): " + - localContext.join("\n") + - "Secondary Context: " + - additionalContext.join("\n"), - }, - ], - }, - { - role: "user", - content: [ - { - type: "text", - text: prompt, - }, - ], - }, - ], + messages: chatHistory || appendToConversation(params), temperature: 0.2, - max_tokens: maxTokens, + // This value is now deprecated in favor of max_completion_tokens, and is not compatible with o1 series models. + // max_COMPLETION_tokens: MAX_COMPLETION_TOKENS, + + /**An upper bound for the number of tokens that can be generated for a completion, including visible output tokens and reasoning tokens. */ + max_completion_tokens: maxTokens, top_p: 0.5, frequency_penalty: 0, presence_penalty: 0, @@ -73,6 +91,98 @@ export class Completions extends SuperOpenAi { type: "text", }, }); + + await this.handleFunctionCalling(res, params); + + return getAnswerAndTokenUsage(res); + } + + async handleFunctionCalling(res: OpenAI.Chat.Completions.ChatCompletion, params: CreationParams) { + const { systemMessage, query, model, additionalContext, localContext, groundTruths, botName, maxTokens } = params; + if (res.choices[0].finish_reason === "function_call") { + const toolCalls = res.choices[0].message.tool_calls; + const choiceMessage = res.choices[0]["message"]; + + if (!toolCalls) { + return; + } + + const fnCallResults: ToolCallResponse[] = []; + + for (const toolCall of toolCalls) { + const { name, arguments: args } = toolCall.function; + let parsedArgs: { should_convert: boolean } = JSON.parse(args); + + if (name === "convert_pull_request_to_draft") { + try { + parsedArgs = JSON.parse(args); + } catch (er) { + throw logger.error("Error parsing args for convert_pull_request_to_draft", { + args, + er, + }); + } + let fnCallResponse; + + if (!parsedArgs.should_convert) { + fnCallResponse = { + role: "tool", + content: "pull request meets the specification, no action 
taken.", + tool_call_id: toolCall.id, + }; + } else { + let number; + + if ("pull_request" in this.context.payload) { + number = this.context.payload.pull_request.number; + } else if ("issue" in this.context.payload) { + number = this.context.payload.issue.number; + } + + if (!number) { + throw logger.error("No pull request or issue number found in payload"); + } + + await this.context.octokit.pulls.update({ + owner: this.context.payload.repository.owner.login, + repo: this.context.payload.repository.name, + pull_number: number, + draft: true, + }); + + fnCallResponse = { + role: "tool", + content: "pull request did not meet the specification, converted to draft.", + tool_call_id: toolCall.id, + }; + } + + fnCallResults.push({ + response: choiceMessage, + tool_call_response: { + content: fnCallResponse.content, + role: "tool", + tool_call_id: toolCall.id, + }, + }); + } + } + const newChat = appendToConversation(params, fnCallResults); + + return await this.createCompletion( + { + systemMessage, + query, + model, + additionalContext, + localContext, + groundTruths, + botName, + maxTokens, + }, + newChat + ); + } const answer = res.choices[0].message; if (answer && answer.content && res.usage) { return { @@ -120,6 +230,7 @@ export class Completions extends SuperOpenAi { } async findTokenLength(prompt: string, additionalContext: string[] = [], localContext: string[] = [], groundTruths: string[] = []): Promise { - return encode(prompt + additionalContext.join("\n") + localContext.join("\n") + groundTruths.join("\n")).length; + // disallowedSpecial: new Set() because we pass the entire diff as the prompt we should account for all special characters + return encode(prompt + additionalContext.join("\n") + localContext.join("\n") + groundTruths.join("\n"), { disallowedSpecial: new Set() }).length; } } diff --git a/src/adapters/openai/helpers/create-system-msg.ts b/src/adapters/openai/helpers/create-system-msg.ts new file mode 100644 index 0000000..532f6a0 --- /dev/null 
+++ b/src/adapters/openai/helpers/create-system-msg.ts @@ -0,0 +1,14 @@ +export function createSystemMessage(systemMessage: string, additionalContext: string[], localContext: string[], groundTruths: string[], botName: string) { + // safer to use array join than string concatenation + const parts = [ + `You Must obey the following ground truths: ${JSON.stringify(groundTruths)}\n`, + systemMessage, + `Your name is: ${botName}`, + "Main Context (Provide additional precedence in terms of information): ", + localContext.join("\n"), + "Secondary Context: ", + additionalContext.join("\n"), + ]; + + return parts.join("\n"); +} diff --git a/src/adapters/openai/helpers/get-answer-and-token-usage.ts b/src/adapters/openai/helpers/get-answer-and-token-usage.ts new file mode 100644 index 0000000..abe9310 --- /dev/null +++ b/src/adapters/openai/helpers/get-answer-and-token-usage.ts @@ -0,0 +1,18 @@ +import OpenAI from "openai"; +import { ResponseFromLlm } from "../types"; + +export function getAnswerAndTokenUsage(apiResponse: OpenAI.Chat.Completions.ChatCompletion): ResponseFromLlm { + const answer = apiResponse.choices[0].message; + if (answer && answer.content && apiResponse.usage) { + return { + answer: answer.content, + tokenUsage: { + input: apiResponse.usage.prompt_tokens, + output: apiResponse.usage.completion_tokens, + total: apiResponse.usage.total_tokens, + outputDetails: apiResponse.usage.completion_tokens_details, + }, + }; + } + return { answer: "", tokenUsage: { input: 0, output: 0, total: 0, outputDetails: { reasoning_tokens: 0 } } }; +} diff --git a/src/adapters/openai/helpers/llm-tools.ts b/src/adapters/openai/helpers/llm-tools.ts new file mode 100644 index 0000000..6d1ae98 --- /dev/null +++ b/src/adapters/openai/helpers/llm-tools.ts @@ -0,0 +1,25 @@ +import OpenAI from "openai"; +import { convertPullToDraft } from "../../../helpers/pull-helpers/convert-pull-to-draft"; + +export const convertPullToDraftTool: OpenAI.Chat.Completions.ChatCompletionTool = { + type: 
"function", + function: { + name: "convertPullToDraftTool", + description: "Convert a pull request that does not meet the spec back to draft mode.", + parameters: { + type: "object", + properties: { + should_convert: { + type: "boolean", + description: "Whether to convert the pull request to draft mode.", + }, + }, + required: ["should_convert"], + additionalProperties: false, + }, + }, +}; + +export const LLM_TOOLS = [convertPullToDraftTool]; +export const LLM_FUNCTIONS = [convertPullToDraft]; +export type ToolFunctions = typeof LLM_FUNCTIONS; diff --git a/src/adapters/openai/helpers/prompts.ts b/src/adapters/openai/helpers/prompts.ts new file mode 100644 index 0000000..c1ceed3 --- /dev/null +++ b/src/adapters/openai/helpers/prompts.ts @@ -0,0 +1,32 @@ +export const CHATBOT_DEFAULT_SYSTEM_MESSAGE = `You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions. + +# Steps + +1. **Understand Context**: Review the chat history and any similar provided responses to understand the context. +2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus. +3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response. +4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query. +5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary. + +# Output Format + +- Concise and coherent responses in paragraphs that directly address the user's question. 
+- Incorporate inline code snippets or references from the documentation if relevant. + +# Examples + +**Example 1** + +*Input:* +- Chat History: "What was the original reason for moving the LP tokens?" +- Corpus Excerpts: "It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not." + +*Output:* +"It was due to missing LP tokens issue from the MasterChefV2.1 Contract. + +# Notes + +- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query. +- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information.`; + +export const PULL_PRECHECK_SYSTEM_MESSAGE = `Perform code review using the diff and spec.`; diff --git a/src/adapters/openai/types.ts b/src/adapters/openai/types.ts new file mode 100644 index 0000000..7d6fa6f --- /dev/null +++ b/src/adapters/openai/types.ts @@ -0,0 +1,36 @@ +import OpenAI from "openai"; + +export type ChatHistory = OpenAI.Chat.Completions.ChatCompletionMessageParam[]; + +export type TokenUsage = { + input: number; + output: number; + total: number; + outputDetails?: { reasoning_tokens?: number }; +}; + +export type ResponseFromLlm = { + answer: string; + groundTruths?: string[]; + tokenUsage: TokenUsage; +}; + +export type CreationParams = { + systemMessage: string; + query: string; + model: string; + additionalContext: string[]; + localContext: string[]; + groundTruths: string[]; + botName: string; + maxTokens: number; +}; + +export type ToolCallResponse = { + response: OpenAI.Chat.Completions.ChatCompletionMessage; + tool_call_response: { + role: "tool"; + content: string; + tool_call_id: string; + }; +}; diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index 
ec4a731..3291644 100644 --- a/src/handlers/add-comment.ts +++ b/src/handlers/add-comment.ts @@ -1,3 +1,4 @@ +import { getIssueNumberFromPayload } from "../helpers/get-issue-no-from-payload"; import { Context } from "../types/context"; /** @@ -7,7 +8,8 @@ import { Context } from "../types/context"; */ export async function addCommentToIssue(context: Context, message: string) { const { payload } = context; - const issueNumber = payload.issue.number; + const issueNumber = getIssueNumberFromPayload(payload); + try { await context.octokit.issues.createComment({ owner: payload.repository.owner.login, diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index f0a262c..86c8e9d 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -1,42 +1,33 @@ import { Context } from "../types"; -import { CompletionsType } from "../adapters/openai/helpers/completions"; import { CommentSimilaritySearchResult } from "../adapters/supabase/helpers/comment"; import { IssueSimilaritySearchResult } from "../adapters/supabase/helpers/issues"; import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; import { formatChatHistory } from "../helpers/format-chat-history"; import { fetchRepoDependencies, fetchRepoLanguageStats } from "./ground-truths/chat-bot"; import { findGroundTruths } from "./ground-truths/find-ground-truths"; -import { bubbleUpErrorComment } from "../helpers/errors"; +import { bubbleUpErrorComment, logger } from "../helpers/errors"; +import { ResponseFromLlm } from "../adapters/openai/types"; +import { CHATBOT_DEFAULT_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; -/** - * Asks a question to GPT and returns the response - * @param context - The context object containing environment and configuration details - * @param question - The question to ask GPT - * @returns The response from GPT - * @throws If no question is provided - */ -export async function askQuestion(context: Context, question: string) { +export async function 
askQuestion(context: Context<"issue_comment.created">, question: string) { if (!question) { - throw context.logger.error("No question provided"); + throw logger.error("No question provided"); } + // using any links in comments or issue/pr bodies to fetch more context const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context, owner: context.payload.repository.owner.login, repo: context.payload.repository.name, + issueNum: context.payload.issue.number, }); + // build a nicely structure system message containing a streamlined chat history + // includes the current issue, any linked issues, and any linked PRs const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - context.logger.info(`${formattedChat.join("")}`); - return await askGpt(context, question, formattedChat); + logger.info(`${formattedChat.join("")}`); + return await askLlm(context, question, formattedChat); } -/** - * Asks GPT a question and returns the completions - * @param context - The context object containing environment and configuration details - * @param question - The question to ask GPT - * @param formattedChat - The formatted chat history to provide context to GPT - * @returns completions - The completions generated by GPT - **/ -export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { +export async function askLlm(context: Context, question: string, formattedChat: string[]): Promise { const { env: { UBIQUITY_OS_APP_NAME }, config: { model, similarityThreshold, maxTokens }, @@ -45,35 +36,78 @@ export async function askGpt(context: Context, question: string, formattedChat: voyage: { reranker }, openai: { completions }, }, - logger, } = context; try { + // using db functions to find similar comments and issues const [similarComments, similarIssues] = await Promise.all([ comment.findSimilarComments(question, 1 - similarityThreshold, ""), - issue.findSimilarIssues(question, 1 - 
similarityThreshold, "") + issue.findSimilarIssues(question, 1 - similarityThreshold, ""), ]); + // combine the similar comments and issues into a single array const similarText = [ - ...similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || [], - ...similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || [] + ...(similarComments?.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext) || []), + ...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []), ]; - formattedChat = formattedChat.filter(text => text); + // filter out any empty strings + formattedChat = formattedChat.filter((text) => text); + logger.info(`Found similar texts: pre-rerank`, { + similarComments, + similarIssues, + }); + + // rerank the similar text using voyageai const rerankedText = similarText.length > 0 ? await reranker.reRankResults(similarText, question) : []; - const [languages, { dependencies, devDependencies }] = await Promise.all([ - fetchRepoLanguageStats(context), - fetchRepoDependencies(context) - ]); - const groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); + logger.info(`Found similar texts: post-rerank`, { + rerankedText, + }); + + // gather structural data about the payload repository + const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]); + + let groundTruths: string[] = []; - const numTokens = await completions.findTokenLength(question, rerankedText, formattedChat, groundTruths); - logger.info(`Number of tokens: ${numTokens}`); + if (!languages.length) { + groundTruths.push("No languages found in the repository"); + } - return completions.createCompletion(question, model, rerankedText, formattedChat, groundTruths, UBIQUITY_OS_APP_NAME, maxTokens); + if (!Reflect.ownKeys(dependencies).length) { + 
groundTruths.push("No dependencies found in the repository"); + } + + if (!Reflect.ownKeys(devDependencies).length) { + groundTruths.push("No devDependencies found in the repository"); + } + + if (groundTruths.length === 3) { + return await completions.createCompletion({ + systemMessage: CHATBOT_DEFAULT_SYSTEM_MESSAGE, + query: question, + model, + additionalContext: rerankedText, + localContext: formattedChat, + groundTruths, + botName: UBIQUITY_OS_APP_NAME, + maxTokens, + }); + } + + groundTruths = await findGroundTruths(context, "chat-bot", { languages, dependencies, devDependencies }); + return await completions.createCompletion({ + systemMessage: CHATBOT_DEFAULT_SYSTEM_MESSAGE, + query: question, + model, + additionalContext: rerankedText, + localContext: formattedChat, + groundTruths, + botName: UBIQUITY_OS_APP_NAME, + maxTokens, + }); } catch (error) { throw bubbleUpErrorComment(context, error, false); } -} \ No newline at end of file +} diff --git a/src/handlers/comment-created-callback.ts b/src/handlers/comment-created-callback.ts index ae44fbe..10784b2 100644 --- a/src/handlers/comment-created-callback.ts +++ b/src/handlers/comment-created-callback.ts @@ -1,8 +1,8 @@ import { Context, SupportedEvents } from "../types"; -import { addCommentToIssue } from "./add-comment"; -import { askQuestion } from "./ask-llm"; import { CallbackResult } from "../types/proxy"; -import { bubbleUpErrorComment, sanitizeMetadata } from "../helpers/errors"; +import { askQuestion } from "./ask-llm"; +import { handleLlmQueryOutput } from "./llm-query-output"; +import { sanitizeMetadata } from "../helpers/errors"; import { LogReturn } from "@ubiquity-os/ubiquity-os-logger"; export async function issueCommentCreatedCallback( @@ -26,32 +26,14 @@ export async function issueCommentCreatedCallback( if (context.payload.comment.user?.type === "Bot") { return { status: 204, reason: logger.info("Comment is from a bot. 
Skipping.").logMessage.raw }; } - - logger.info(`Asking question: ${question}`); - let commentToPost; - try { - const response = await askQuestion(context, question); - const { answer, tokenUsage, groundTruths } = response; - if (!answer) { - throw logger.error(`No answer from OpenAI`); - } - logger.info(`Answer: ${answer}`, { tokenUsage }); - - const metadata = { - groundTruths, - tokenUsage, - }; - - const metadataString = createStructuredMetadata("LLM Ground Truths and Token Usage", logger.info(`Answer: ${answer}`, { metadata })); - commentToPost = answer + metadataString; - await addCommentToIssue(context, commentToPost); - return { status: 200, reason: logger.info("Comment posted successfully").logMessage.raw }; - } catch (error) { - throw await bubbleUpErrorComment(context, error, false); + if (question.replace(slugRegex, "").trim().length === 0) { + return { status: 204, reason: logger.info("Comment is empty. Skipping.").logMessage.raw }; } + logger.info(`Asking question: ${question}`); + return await handleLlmQueryOutput(context, await askQuestion(context, question)); } -function createStructuredMetadata(header: string | undefined, logReturn: LogReturn) { +export function createStructuredMetadata(header: string | undefined, logReturn: LogReturn) { let logMessage, metadata; if (logReturn) { logMessage = logReturn.logMessage; diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index b033686..8d1418e 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -10,11 +10,14 @@ import { StreamlinedComment } from "../types/llm"; */ export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { const streamlinedComments: Record = {}; + for (const issue of linkedIssues) { const linkedIssueComments = issue.comments || []; if (linkedIssueComments.length === 0) continue; + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); if (!linkedStreamlinedComments) continue; + for (const [key, value] of 
Object.entries(linkedStreamlinedComments)) { streamlinedComments[key] = [...(streamlinedComments[key] || []), ...value]; } @@ -74,15 +77,15 @@ export function createKey(issueUrl: string, issue?: number) { */ export function streamlineComments(comments: SimplifiedComment[]) { const streamlined: Record = {}; + for (const comment of comments) { const { user, issueUrl: url, body } = comment; - // Skip bot comments if (user?.type === "Bot") continue; + const key = createKey(url); const [owner, repo] = splitKey(key); - if (!streamlined[key]) { - streamlined[key] = []; - } + streamlined[key] ??= []; + if (user && body) { streamlined[key].push({ user: user.login, diff --git a/src/handlers/find-ground-truths.ts b/src/handlers/find-ground-truths.ts new file mode 100644 index 0000000..c861463 --- /dev/null +++ b/src/handlers/find-ground-truths.ts @@ -0,0 +1,86 @@ +import OpenAI from "openai"; +import { Context } from "../types"; +import { logger } from "../helpers/errors"; + +const FIND_GROUND_TRUTHS_SYSTEM_MESSAGE = `Using the input provided, your goal is to produce an array of strings that represent "Ground Truths." + These ground truths are high-level abstractions that encapsulate the key aspects of the task. + They serve to guide and inform our code review model's interpretation of the task by providing clear, concise, and explicit insights. + + Each ground truth should: + - Be succinct and easy to understand. + - Directly pertain to the task at hand. + - Focus on essential requirements, behaviors, or assumptions involved in the task. + + Example: + Task: Implement a function that adds two numbers. + Ground Truths: + - The function should accept two numerical inputs. + - The function should return the sum of the two inputs. + - Inputs must be validated to ensure they are numbers. + + Based on the given task, generate similar ground truths adhering to a maximum of 10. 
+ + Return a JSON parsable array of strings representing the ground truths, without comment or directive.`; + +function validateGroundTruths(truthsString: string): string[] { + let truths; + try { + truths = JSON.parse(truthsString); + } catch (err) { + throw logger.error("Failed to parse ground truths"); + } + if (!Array.isArray(truths)) { + throw logger.error("Ground truths must be an array"); + } + + if (truths.length > 10) { + throw logger.error("Ground truths must not exceed 10"); + } + + truths.forEach((truth: string) => { + if (typeof truth !== "string") { + throw logger.error("Each ground truth must be a string"); + } + }); + + return truths; +} + +export async function findGroundTruths(context: Context, groundTruthSource: string) { + const { + env: { OPENAI_API_KEY }, + config: { openAiBaseUrl, model }, + } = context; + + const openAi = new OpenAI({ + apiKey: OPENAI_API_KEY, + ...(openAiBaseUrl && { baseURL: openAiBaseUrl }), + }); + + const res = await openAi.chat.completions.create({ + messages: [ + { + role: "system", + content: FIND_GROUND_TRUTHS_SYSTEM_MESSAGE, + }, + { + role: "user", + content: groundTruthSource, + }, + ], + /** + * I've used the config model here but in my opinion, + * we should optimize this for a quicker response which + * means no advanced reasoning models. 
rfc + */ + model: model, + }); + + const output = res.choices[0].message.content; + + if (!output) { + throw logger.error("Failed to produce a ground truths response"); + } + + return validateGroundTruths(output); +} diff --git a/src/handlers/ground-truths/chat-bot.ts b/src/handlers/ground-truths/chat-bot.ts index 6d087d2..de32e8c 100644 --- a/src/handlers/ground-truths/chat-bot.ts +++ b/src/handlers/ground-truths/chat-bot.ts @@ -68,6 +68,7 @@ export async function fetchRepoLanguageStats(context: Context) { return Array.from(Object.entries(stats)).sort((a, b) => b[1] - a[1]); } catch (err) { - throw logger.error(`Error fetching language stats for ${owner}/${repo}`, { err }); + logger.error(`Error fetching language stats for ${owner}/${repo}`, { err }); + return []; } } diff --git a/src/handlers/llm-query-output.ts b/src/handlers/llm-query-output.ts new file mode 100644 index 0000000..bda763b --- /dev/null +++ b/src/handlers/llm-query-output.ts @@ -0,0 +1,33 @@ +import { ResponseFromLlm } from "../adapters/openai/types"; +import { bubbleUpErrorComment } from "../helpers/errors"; +import { Context } from "../types"; +import { CallbackResult } from "../types/proxy"; +import { addCommentToIssue } from "./add-comment"; +import { createStructuredMetadata } from "./comment-created-callback"; + +export async function handleLlmQueryOutput(context: Context, llmResponse: ResponseFromLlm): Promise { + const { logger } = context; + try { + const { answer, tokenUsage, groundTruths } = llmResponse; + if (!answer) { + throw logger.error(`No answer from OpenAI`); + } + logger.info(`Answer: ${answer}`, { tokenUsage }); + + const metadataString = createStructuredMetadata( + // don't change this header, it's used for tracking + "ubiquity-os-llm-response", + logger.info(`Answer: ${answer}`, { + metadata: { + groundTruths, + tokenUsage, + }, + }) + ); + + await addCommentToIssue(context, answer + metadataString); + return { status: 200, reason: logger.info("Comment posted 
successfully").logMessage.raw }; + } catch (error) { + throw await bubbleUpErrorComment(context, error, false); + } +} diff --git a/src/handlers/pull-precheck.ts b/src/handlers/pull-precheck.ts new file mode 100644 index 0000000..d19fe90 --- /dev/null +++ b/src/handlers/pull-precheck.ts @@ -0,0 +1,72 @@ +import { PULL_PRECHECK_SYSTEM_MESSAGE } from "../adapters/openai/helpers/prompts"; +import { fetchPullRequestDiff } from "../helpers/issue-fetching"; +import { canPerformReview } from "../helpers/pull-helpers/can-perform-review"; +import { getTaskSpecFromPullRequest } from "../helpers/pull-helpers/get-task-spec"; +import { hasCollaboratorConvertedPr } from "../helpers/pull-helpers/has-collaborator-converted"; +import { Context, SupportedEvents } from "../types"; +import { CallbackResult } from "../types/proxy"; +import { findGroundTruths } from "./find-ground-truths"; +import { handleChat } from "../adapters/openai/helpers/call-handler"; +// import { handleLlmQueryOutput } from "./llm-query-output"; + +export async function performPullPrecheck( + context: Context<"pull_request.opened" | "pull_request.ready_for_review", SupportedEvents["pull_request.opened" | "pull_request.ready_for_review"]> +): Promise { + const { logger, payload } = context; + const { pull_request } = payload; + + // Check if PR is in draft mode, closed, or if we can perform a review + if (pull_request.draft) { + return { status: 200, reason: logger.info("PR is in draft mode, no action required").logMessage.raw }; + } else if (pull_request.state === "closed") { + return { status: 200, reason: logger.info("PR is closed, no action required").logMessage.raw }; + } else if (!(await canPerformReview(context))) { + return { status: 200, reason: logger.info("Cannot perform review at this time").logMessage.raw }; + } else if (await hasCollaboratorConvertedPr(context)) { + return { status: 200, reason: logger.info("Collaborator has converted the PR, no action required").logMessage.raw }; + } + + return 
await handleCodeReview(context); +} + +export async function handleCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">): Promise { + const { + logger, + payload, + config: { model }, + env: { UBIQUITY_OS_APP_NAME }, + } = context; + let { + repository: { + owner: { login: repoOwner }, + name: repoName, + }, + } = payload; + const taskSpec = await getTaskSpecFromPullRequest(context, repoOwner, repoName); + + repoOwner = "ubiquity-os-marketplace"; // remove after QA + repoName = "command-ask"; // remove after QA + const prDiff = await fetchPullRequestDiff(context, repoOwner, repoName, 11 /* remove after QA*/); + if (!prDiff) { + throw logger.error("PR Diff not found"); + } + + const creationOptions = { + systemMessage: PULL_PRECHECK_SYSTEM_MESSAGE, + prompt: "What's missing compared to the spec?", + model, + additionalContext: [prDiff, taskSpec], + localContext: [], + groundTruths: await findGroundTruths(context, taskSpec), + botName: UBIQUITY_OS_APP_NAME, + }; + + const llmResponse = await handleChat(context, [ + { role: "system", content: creationOptions.systemMessage }, + { role: "user", content: creationOptions.prompt }, + ]); + console.log(creationOptions, llmResponse); + return { status: 200, reason: "Success" }; + // const llmResponse = await context.adapters.openai.completions.createCompletion(creationOptions); + // return handleLlmQueryOutput(context, llmResponse); +} diff --git a/src/helpers/callback-proxy.ts b/src/helpers/callback-proxy.ts index a50da5e..d001983 100644 --- a/src/helpers/callback-proxy.ts +++ b/src/helpers/callback-proxy.ts @@ -1,4 +1,5 @@ import { issueCommentCreatedCallback } from "../handlers/comment-created-callback"; +import { performPullPrecheck } from "../handlers/pull-precheck"; import { Context, SupportedEventsU } from "../types"; import { ProxyCallbacks } from "../types/proxy"; import { bubbleUpErrorComment } from "./errors"; @@ -12,6 +13,8 @@ import { bubbleUpErrorComment } from "./errors"; */ 
const callbacks = { "issue_comment.created": [issueCommentCreatedCallback], + "pull_request.opened": [performPullPrecheck], + "pull_request.ready_for_review": [performPullPrecheck], } as ProxyCallbacks; /** diff --git a/src/helpers/errors.ts b/src/helpers/errors.ts index 52dae30..271f8e2 100644 --- a/src/helpers/errors.ts +++ b/src/helpers/errors.ts @@ -1,6 +1,6 @@ import { LogReturn, Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Context } from "../types"; -import { addCommentToIssue } from "../handlers/add-comment"; +// import { addCommentToIssue } from "../handlers/add-comment"; export const logger = new Logs("debug"); export function handleUncaughtError(error: unknown) { @@ -24,7 +24,7 @@ export async function bubbleUpErrorComment(context: Context, err: unknown, post } if (post) { - await addCommentToIssue(context, `${errorMessage?.logMessage.diff}\n`); + // await addCommentToIssue(context, `${errorMessage?.logMessage.diff}\n`); } return errorMessage; diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index ecb2b38..098d9a8 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -1,79 +1,103 @@ import { Context } from "../types"; -import { StreamlinedComment, StreamlinedComments } from "../types/llm"; +import { StreamlinedComment, StreamlinedComments, TokenLimits } from "../types/llm"; import { createKey, streamlineComments } from "../handlers/comments"; -import { fetchPullRequestDiff, fetchIssue, fetchIssueComments, fetchLinkedPullRequests } from "./issue-fetching"; -import { splitKey } from "./issue"; - -/** - * Formats the chat history by combining streamlined comments and specifications or bodies for issues and pull requests. - * - * @param context - The context object containing information about the current GitHub event. - * @param streamlined - A record of streamlined comments for each issue or pull request. 
- * @param specAndBodies - A record of specifications or bodies for each issue or pull request. - * @returns A promise that resolves to a formatted string representing the chat history. - */ +import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; +import { pullReadmeFromRepoForIssue, splitKey } from "./issue"; +import { logger } from "./errors"; + export async function formatChatHistory( - context: Context, + context: Context<"issue_comment.created">, streamlined: Record, specAndBodies: Record ): Promise { + // At this point really we should have all the context we can obtain but we try again just in case const keys = new Set([...Object.keys(streamlined), ...Object.keys(specAndBodies), createKey(context.payload.issue.html_url)]); - let runningTokenCount = 0; + const tokenLimits: TokenLimits = { + modelMaxTokenLimit: context.adapters.openai.completions.getModelMaxTokenLimit(context.config.model), + maxCompletionTokens: context.config.maxTokens || context.adapters.openai.completions.getModelMaxOutputLimit(context.config.model), + runningTokenCount: 0, + tokensRemaining: 0, + }; + + // what we start out with + tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens; + + // careful adding any more API calls here as it's likely to hit the secondary rate limit const chatHistory = await Promise.all( - Array.from(keys).map(async (key) => { - const isCurrentIssue = key === createKey(context.payload.issue.html_url); - const [currentTokenCount, result] = await createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue, runningTokenCount); - runningTokenCount += currentTokenCount; + // keys are owner/repo/issueNum; so for each issue, we want to create a block + Array.from(keys).map(async (key, i) => { + // if we run out of tokens, we should stop + if (tokenLimits.tokensRemaining < 0) { + logger.error(`Ran out of tokens at block ${i}`); + return ""; + } + const [currentTokenCount, 
result] = await createContextBlockSection({ + context, + key, + streamlined, + specAndBodies, + isCurrentIssue: key === createKey(context.payload.issue.html_url), + tokenLimits, + }); + // update the token count + tokenLimits.runningTokenCount = currentTokenCount; + tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens - currentTokenCount; return result; }) ); - return Array.from(new Set(chatHistory)); + + return Array.from(new Set(chatHistory)).filter((x): x is string => !!x); } -/** - * Generates the correct header string based on the provided parameters. - * - * @param prDiff - The pull request diff string, if available. - * @param issueNumber - The issue number. - * @param isCurrentIssue - A boolean indicating if this is the current issue. - * @param isBody - A boolean indicating if this is for the body of the issue. - * @returns The formatted header string. - */ -function getCorrectHeaderString(prDiff: string | null, issueNumber: number, isCurrentIssue: boolean, isBody: boolean) { - const headerTemplates = { - pull: `Pull #${issueNumber} Request`, - issue: `Issue #${issueNumber} Specification`, - convo: `Issue #${issueNumber} Conversation`, +// These give structure and provide the distinction between the different sections of the chat history +function getCorrectHeaderString(prDiff: string | null, isCurrentIssue: boolean, isConvo: boolean) { + const strings = { + convo: { + pull: { + linked: `Linked Pull Request Conversation`, + current: `Current Pull Request Conversation`, + }, + issue: { + linked: `Linked Task Conversation`, + current: `Current Task Conversation`, + }, + }, + spec: { + pull: { + linked: `Linked Pull Request Specification`, + current: `Current Pull Request Specification`, + }, + issue: { + linked: `Linked Task Specification`, + current: `Current Task Specification`, + }, + }, }; - const type = prDiff ? "pull" : "issue"; - const context = isCurrentIssue ? 
"current" : "linked"; - const bodyContext = isBody ? "convo" : type; - - return `${context.charAt(0).toUpperCase() + context.slice(1)} ${headerTemplates[bodyContext]}`; + const category = isConvo ? "convo" : "spec"; + const issueType = prDiff ? "pull" : "issue"; + const issueStatus = isCurrentIssue ? "current" : "linked"; + return strings[category][issueType][issueStatus]; } -/** - * Creates a context block section for the given issue or pull request. - * - * @param context - The context object containing information about the current GitHub event. - * @param key - The unique key representing the issue or pull request. - * @param streamlined - A record of streamlined comments for each issue or pull request. - * @param specAndBodies - A record of specifications or bodies for each issue or pull request. - * @param isCurrentIssue - A boolean indicating whether the key represents the current issue. - * @returns A formatted string representing the context block section. - */ -async function createContextBlockSection( - context: Context, - key: string, - streamlined: Record, - specAndBodies: Record, - isCurrentIssue: boolean, - currentContextTokenCount: number = 0 -): Promise<[number, string]> { - const maxTokens = context.config.maxTokens; +async function createContextBlockSection({ + context, + key, + streamlined, + specAndBodies, + isCurrentIssue, + tokenLimits, +}: { + context: Context; + key: string; + streamlined: Record; + specAndBodies: Record; + isCurrentIssue: boolean; + tokenLimits: TokenLimits; +}): Promise<[number, string]> { let comments = streamlined[key]; - if (!comments || comments.length === 0) { + // just in case we try again but we should already have the comments + if (!comments || !comments.length) { const [owner, repo, number] = splitKey(key); const { comments: fetchedComments } = await fetchIssueComments({ context, @@ -83,25 +107,18 @@ async function createContextBlockSection( }); comments = streamlineComments(fetchedComments)[key]; } + const 
[org, repo, issueNum] = key.split("/"); const issueNumber = parseInt(issueNum); if (!issueNumber || isNaN(issueNumber)) { throw context.logger.error("Issue number is not valid"); } - const pulls = (await fetchLinkedPullRequests(org, repo, issueNumber, context)) || []; - const prDiffs = await Promise.all(pulls.map((pull) => fetchPullRequestDiff(context, org, repo, pull.number))); - let prDiff: string | null = null; - for (const pullDiff of prDiffs.flat()) { - if (currentContextTokenCount > maxTokens) break; - if (pullDiff) { - const tokenLength = await context.adapters.openai.completions.findTokenLength(pullDiff.diff); - if (currentContextTokenCount + tokenLength > maxTokens) break; - currentContextTokenCount += tokenLength; - prDiff = (prDiff ? prDiff + "\n" : "") + pullDiff.diff; - } - } - const specHeader = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, false); + + // Fetch our diff if we have one; this excludes the largest of files to keep within token limits + const { diff } = await fetchPullRequestDiff(context, org, repo, issueNumber, tokenLimits); + // specification or pull request body let specOrBody = specAndBodies[key]; + // we should have it already but just in case if (!specOrBody) { specOrBody = ( @@ -113,61 +130,75 @@ async function createContextBlockSection( }) )?.body || "No specification or body available"; } - const specOrBodyBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader)]; - currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(specOrBody); - const header = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, true); - const repoString = `${org}/${repo} #${issueNumber}`; - const block = [specOrBodyBlock.join(""), createHeader(header, repoString), createComment({ issueNumber, repo, org, comments }), createFooter(header)]; - currentContextTokenCount += await context.adapters.openai.completions.findTokenLength(block.join(" ")); - if (!prDiff) { - return 
[currentContextTokenCount, block.join("")]; + + const specHeader = getCorrectHeaderString(diff, isCurrentIssue, false); //E.g: === Current Task Specification === + const blockHeader = getCorrectHeaderString(diff, isCurrentIssue, true); //E.g: === Linked Task Conversation === + + // contains the actual spec or body + const specBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader, key)]; + // contains the conversation + const commentSection = createComment({ issueNumber, repo, org, comments }, specOrBody); + + let block; + // if we have a conversation, we should include it + if (commentSection) { + block = [specBlock.join("\n"), createHeader(blockHeader, key), commentSection, createFooter(blockHeader, key)]; + } else { + // No need for empty sections in the chat history + block = [specBlock.join("\n")]; + } + + // only inject the README if this is the current issue as that's likely most relevant + if (isCurrentIssue) { + const readme = await pullReadmeFromRepoForIssue({ context, owner: org, repo }); + // give the readme it's own clear section + if (readme) { + const readmeBlock = readme ? 
[createHeader("README", key), createSpecOrBody(readme), createFooter("README", key)] : []; + block = block.concat(readmeBlock); + } } - const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), prDiff, createFooter("\nLinked Pull Request Code Diff")]; - return [currentContextTokenCount, block.join("") + diffBlock.join("")]; + + if (!diff) { + // the diff was already encoded etc but we have added more to the block so we need to re-encode + return [await context.adapters.openai.completions.findTokenLength(block.join("")), block.join("\n")]; + } + + // Build the block with the diff in it's own section + const blockWithDiff = [block.join("\n"), createHeader(`Pull Request Diff`, key), diff, createFooter(`Pull Request Diff`, key)]; + return [await context.adapters.openai.completions.findTokenLength(blockWithDiff.join("")), blockWithDiff.join("\n")]; } -/** - * Creates a header string for the given content and repository string. - * - * @param content - The content to include in the header. - * @param repoString - The repository string to include in the header. - * @returns A formatted header string. - */ function createHeader(content: string, repoString: string) { - return `=== ${content} === ${repoString} ===\n\n`; + return `=== ${content} === ${repoString} ===\n`; +} + +function createFooter(content: string, repoString: string) { + return `=== End ${content} === ${repoString} ===\n`; } -/** - * Creates a footer string for the given content. - * - * @param content - The content to include in the footer. - * @returns A formatted footer string. - */ -function createFooter(content: string) { - return `=== End ${content} ===\n\n`; +function createSpecOrBody(specOrBody: string) { + return `${specOrBody}\n`; } -/** - * Creates a comment string from the StreamlinedComments object. - * - * @param comment - The StreamlinedComments object. - * @returns A string representing the comments. 
- */ -function createComment(comment: StreamlinedComments) { +function createComment(comment: StreamlinedComments, specOrBody: string) { if (!comment.comments) { - return ""; + return null; } - // Format comments + + const seen = new Set(); + comment.comments = comment.comments.filter((c) => { + // Do not include the same comment twice or the spec/body + if (seen.has(c.id) || c.body === specOrBody) { + return false; + } + seen.add(c.id); + return true; + }); + const formattedComments = comment.comments.map((c) => `${c.id} ${c.user}: ${c.body}\n`); - return formattedComments.join(""); -} -/** - * Creates a formatted string for the specification or body of an issue. - * - * @param specOrBody - The specification or body content. - * @returns A formatted string representing the specification or body. - */ -function createSpecOrBody(specOrBody: string) { - return `${specOrBody}\n`; + if (formattedComments.length === 0) { + return; + } + return formattedComments.join(""); } diff --git a/src/helpers/get-issue-no-from-payload.ts b/src/helpers/get-issue-no-from-payload.ts new file mode 100644 index 0000000..006289f --- /dev/null +++ b/src/helpers/get-issue-no-from-payload.ts @@ -0,0 +1,34 @@ +import { Context } from "../types"; +import { FetchParams } from "../types/github-types"; +import { logger } from "./errors"; + +export function getIssueNumberFromPayload(payload: Context["payload"], fetchParams?: FetchParams): number { + let issueNumber, owner, repo; + + if (!issueNumber) { + if ("issue" in payload) { + issueNumber = payload.issue.number; + } + + if (!issueNumber && "pull_request" in payload) { + issueNumber = payload.pull_request.number; + } + } + + // takes precedence and overrides the payload + if (fetchParams) { + owner = fetchParams.owner; + repo = fetchParams.repo; + issueNumber = fetchParams.issueNum; + } + + if (!issueNumber) { + throw logger.error(`Error fetching issue`, { + owner: owner || payload.repository.owner.login, + repo: repo || 
payload.repository.name, + issue_number: issueNumber, + }); + } + + return issueNumber; +} diff --git a/src/helpers/get-owner-repo-issue-from-url.ts b/src/helpers/get-owner-repo-issue-from-url.ts new file mode 100644 index 0000000..24e22f0 --- /dev/null +++ b/src/helpers/get-owner-repo-issue-from-url.ts @@ -0,0 +1,13 @@ +export function getOwnerRepoIssueNumberFromUrl(body: string | undefined | null): { owner: string; repo: string; issueNumber: string } | null { + if (!body) return null; + + const regex = /https:\/\/(www\.)?github.com\/(?[\w-]+)\/(?[\w-]+)\/issues\/(?\d+)/i; + const match = body.match(regex); + + if (match && match.groups) { + const { owner, repo, issueNumber } = match.groups; + return { owner, repo, issueNumber }; + } + + return null; +} diff --git a/src/helpers/gql-functions.ts b/src/helpers/gql-functions.ts new file mode 100644 index 0000000..d555412 --- /dev/null +++ b/src/helpers/gql-functions.ts @@ -0,0 +1,53 @@ +import { Octokit } from "@octokit/rest"; +import { closedByPullRequestsReferences, IssuesClosedByThisPr } from "./gql-queries"; + +export async function checkIfPrClosesIssues( + octokit: Octokit, + pr: { + owner: string; + repo: string; + pr_number: number; + } +) { + const { owner, repo, pr_number } = pr; + + if (!pr_number) { + throw new Error("[checkIfPrClosesIssues]: pr_number is required"); + } + try { + const result = await octokit.graphql(closedByPullRequestsReferences, { + owner, + repo, + pr_number, + }); + + const closingIssues = result.repository.pullRequest.closingIssuesReferences.edges.map((edge) => ({ + number: edge.node.number, + title: edge.node.title, + url: edge.node.url, + body: edge.node.body, + repository: { + name: edge.node.name, + owner: edge.node.owner, + }, + })); + + if (closingIssues.length > 0) { + return { + closesIssues: true, + issues: closingIssues, + }; + } else { + return { + closesIssues: false, + issues: [], + }; + } + } catch (error) { + console.error("Error fetching closing issues:", error); + 
return { + closesIssues: false, + issues: [], + }; + } +} diff --git a/src/helpers/gql-queries.ts b/src/helpers/gql-queries.ts new file mode 100644 index 0000000..97cd4f7 --- /dev/null +++ b/src/helpers/gql-queries.ts @@ -0,0 +1,40 @@ +import { PullRequest } from "@octokit/graphql-schema"; + +type ClosedByPullRequestsReferences = { + node: Pick & { owner: string; name: string }; +}; + +export type IssuesClosedByThisPr = { + repository: { + pullRequest: { + closingIssuesReferences: { + edges: ClosedByPullRequestsReferences[]; + }; + }; + }; +}; + +export const closedByPullRequestsReferences = /* GraphQL */ ` + query closingIssuesReferencesQuery($owner: String!, $repo: String!, $pr_number: Int!) { + repository(owner: $owner, name: $repo) { + pullRequest(number: $pr_number) { + closingIssuesReferences(first: 100) { + edges { + node { + number + title + url + body + repository { + name + owner { + login + } + } + } + } + } + } + } + } +`; diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 9f96d7b..ee355b0 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -1,53 +1,41 @@ -import { GithubDiff } from "github-diff-tool"; import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; -import { IssueComments, FetchParams, Issue, LinkedIssues, LinkedPullsToIssue, ReviewComments, SimplifiedComment } from "../types/github-types"; -import { StreamlinedComment } from "../types/llm"; +import { IssueComments, FetchParams, Issue, LinkedIssues, ReviewComments, SimplifiedComment } from "../types/github-types"; +import { StreamlinedComment, TokenLimits } from "../types/llm"; import { logger } from "./errors"; -import { - dedupeStreamlinedComments, - fetchCodeLinkedFromIssue, - idIssueFromComment, - mergeStreamlinedComments, - pullReadmeFromRepoForIssue, - splitKey, -} from "./issue"; +import { getIssueNumberFromPayload } from "./get-issue-no-from-payload"; +import { 
dedupeStreamlinedComments, fetchCodeLinkedFromIssue, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { handleIssue, handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; +import { processPullRequestDiff } from "./pull-request-parsing"; -/** - * Recursively fetches linked issues and processes them, including fetching comments and specifications. - * - * @param params - The parameters required to fetch the linked issues, including context and other details. - * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. - */ export async function recursivelyFetchLinkedIssues(params: FetchParams) { + // take a first run at gathering everything we need and package it up const { linkedIssues, seen, specAndBodies, streamlinedComments } = await fetchLinkedIssues(params); + // build promises and throttle them; this calls handleSpec which is a recursive function potentially to great depth const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); await throttlePromises(fetchPromises, 10); + // handle the keys that have been gathered const linkedIssuesKeys = linkedIssues.map((issue) => createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`)); + // exhaustive list of unique keys from the first full pass const specAndBodyKeys = Array.from(new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); + // this fn throttles from within but again, be weary of the rate limit await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); return { linkedIssues, specAndBodies, streamlinedComments }; } -/** - * Fetches linked issues recursively and processes them. 
- * - * @param params - The parameters required to fetch the linked issues, including context and other details. - * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. - */ export async function fetchLinkedIssues(params: FetchParams) { - const { comments, issue } = await fetchIssueComments(params); - if (!issue) { + const fetchedIssueAndComments = await fetchIssueComments(params); + if (!fetchedIssueAndComments.issue) { return { streamlinedComments: {}, linkedIssues: [], specAndBodies: {}, seen: new Set() }; } - if (!issue.body || !issue.html_url) { - throw logger.error("Issue body or URL not found", { issueUrl: issue.html_url }); - } if (!params.owner || !params.repo) { throw logger.error("Owner or repo not found"); } + + const issue = fetchedIssueAndComments.issue; + const comments = fetchedIssueAndComments.comments.filter((comment) => comment.body !== undefined); + const issueKey = createKey(issue.html_url); const [owner, repo, issueNumber] = splitKey(issueKey); const linkedIssues: LinkedIssues[] = [{ body: issue.body, comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; @@ -63,37 +51,16 @@ export async function fetchLinkedIssues(params: FetchParams) { issueUrl: issue.html_url, }); - //Fetch the README of the repository - try { - const readme = await pullReadmeFromRepoForIssue(params); - if (readme) { - comments.push({ - body: readme, - user: issue.user, - id: issue.id.toString(), - org: params.owner, - repo: params.repo, - issueUrl: issue.html_url, - }); - } - } catch (error) { - params.context.logger.error(`Error fetching README`, { - err: error, - owner, - repo, - issue, - }); - } - for (const comment of comments) { - const foundIssues = idIssueFromComment(comment.body); + const foundIssues = idIssueFromComment(comment.body, params); const foundCodes = comment.body ? 
await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; + if (foundIssues) { for (const linkedIssue of foundIssues) { const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); if (seen.has(linkedKey)) continue; - seen.add(linkedKey); + const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ context: params.context, issueNum: linkedIssue.issueNumber, @@ -130,15 +97,6 @@ export async function fetchLinkedIssues(params: FetchParams) { return { streamlinedComments, linkedIssues, specAndBodies, seen }; } -/** - * Merges comments and fetches the specification for a linked issue. - * - * @param params - The parameters required to fetch the linked issue, including context and other details. - * @param linkedIssue - The linked issue for which comments and specifications need to be fetched. - * @param streamlinedComments - A record of streamlined comments associated with issues. - * @param specOrBodies - A record of specifications or bodies associated with issues. - * @param seen - A set of issue keys that have already been processed to avoid duplication. - */ export async function mergeCommentsAndFetchSpec( params: FetchParams, linkedIssue: LinkedIssues, @@ -151,74 +109,43 @@ export async function mergeCommentsAndFetchSpec( const merged = mergeStreamlinedComments(streamlinedComments, streamed); streamlinedComments = { ...streamlinedComments, ...merged }; } + if (linkedIssue.body) { await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); } } -/** - * Fetches the diff of a pull request. - * - * @param context - The context containing the octokit instance and logger. - * @param org - The organization or owner of the repository. - * @param repo - The name of the repository. - * @param issue - The pull request number. - * @returns A promise that resolves to the diff of the pull request as a string, or null if an error occurs. 
- */ -export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number): Promise<{ diff: string; diffSize: number }[] | null> { - const { octokit, logger } = context; +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number, tokenLimits: TokenLimits) { + const { octokit } = context; + let diff: string; + try { - const githubDiff = new GithubDiff(octokit); - //Fetch the statistics of the pull request - const stats = await githubDiff.getPullRequestStats(org, repo, issue); - const files = stats.map((file) => ({ filename: file.filename, diffSizeInBytes: file.diffSizeInBytes })); - //Fetch the diff of the files - const prDiffs = await Promise.all( - files.map(async (file) => { - let diff = null; - try { - diff = await githubDiff.getPullRequestDiff({ - owner: org, - repo, - pullNumber: issue, - filePath: file.filename, - }); - } catch { - logger.error(`Error fetching pull request diff for the file`, { - owner: org, - repo, - pull_number: issue, - file: file.filename, - }); - } - return diff ? { diff: file.filename + diff, diffSize: file.diffSizeInBytes } : null; - }) - ); - return prDiffs.filter((diff): diff is { diff: string; diffSize: number } => diff !== null).sort((a, b) => a.diffSize - b.diffSize); - } catch (error) { - logger.error(`Error fetching pull request diff`, { - err: error, + const diffResponse = await octokit.pulls.get({ owner: org, repo, pull_number: issue, + mediaType: { format: "diff" }, }); - return null; + + diff = diffResponse.data as unknown as string; + } catch (e) { + logger.error(`Error fetching PR data`, { owner: org, repo, issue, err: String(e) }); + return { diff: null }; } + + return await processPullRequestDiff(diff, tokenLimits); } -/** - * Fetches an issue from the GitHub API. - * @param params - Context - * @returns A promise that resolves to an issue object or null if an error occurs. 
- */ export async function fetchIssue(params: FetchParams): Promise { const { octokit, payload, logger } = params.context; - const { issueNum, owner, repo } = params; + const { owner, repo } = params; + const issueNumber = getIssueNumberFromPayload(payload, params); + try { const response = await octokit.rest.issues.get({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); return response.data; } catch (error) { @@ -226,7 +153,7 @@ export async function fetchIssue(params: FetchParams): Promise { err: error, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); return null; } @@ -240,8 +167,10 @@ export async function fetchIssue(params: FetchParams): Promise { */ export async function fetchIssueComments(params: FetchParams) { const { octokit, payload, logger } = params.context; - const { issueNum, owner, repo } = params; + const { owner, repo } = params; const issue = await fetchIssue(params); + const issueNumber = getIssueNumberFromPayload(payload, params); + let reviewComments: ReviewComments[] = []; let issueComments: IssueComments[] = []; try { @@ -249,14 +178,22 @@ export async function fetchIssueComments(params: FetchParams) { const response = await octokit.rest.pulls.listReviewComments({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - pull_number: issueNum || payload.issue.number, + pull_number: issueNumber, }); reviewComments = response.data; + + const response2 = await octokit.rest.issues.listComments({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNumber || ("issue" in payload ? 
payload.issue.number : 0), + }); + + issueComments = response2.data; } else { const response = await octokit.rest.issues.listComments({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); issueComments = response.data; } @@ -265,7 +202,7 @@ export async function fetchIssueComments(params: FetchParams) { e, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, + issue_number: issueNumber, }); } const comments = [...issueComments, ...reviewComments].filter((comment) => comment.user?.type !== "Bot"); @@ -277,15 +214,6 @@ export async function fetchIssueComments(params: FetchParams) { }; } -/** - * Fetches and handles an issue based on the provided key and parameters. - * - * @param key - The unique key representing the issue in the format "owner/repo/issueNumber". - * @param params - The parameters required to fetch the issue, including context and other details. - * @param streamlinedComments - A record of streamlined comments associated with issues. - * @param seen - A set of issue keys that have already been processed to avoid duplication. - * @returns A promise that resolves to an array of streamlined comments for the specified issue. 
- */ export async function fetchAndHandleIssue( key: string, params: FetchParams, @@ -317,54 +245,17 @@ function castCommentsToSimplifiedComments(comments: (IssueComments | ReviewComme }; } - if ("issue_url" in comment) { + if ("html_url" in comment) { return { body: comment.body, user: comment.user, id: comment.id.toString(), org: params.owner || params.context.payload.repository.owner.login, repo: params.repo || params.context.payload.repository.name, - issueUrl: comment.issue_url, + issueUrl: comment.html_url, }; } throw logger.error("Comment type not recognized", { comment, params }); }); } - -export async function fetchLinkedPullRequests(owner: string, repo: string, issueNumber: number, context: Context) { - const query = ` - query($owner: String!, $repo: String!, $issueNumber: Int!) { - repository(owner: $owner, name: $repo) { - issue(number: $issueNumber) { - closedByPullRequestsReferences(first: 100) { - nodes { - number - title - state - merged - url - } - } - } - } - } - `; - - try { - const { repository } = await context.octokit.graphql(query, { - owner, - repo, - issueNumber, - }); - return repository.issue.closedByPullRequestsReferences.nodes; - } catch (error) { - context.logger.error(`Error fetching linked PRs from issue`, { - err: error, - owner, - repo, - issueNumber, - }); - return null; - } -} diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 3f44225..779cb26 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -4,14 +4,6 @@ import { StreamlinedComment } from "../types/llm"; import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, mergeCommentsAndFetchSpec } from "./issue-fetching"; -/** - * Handles the processing of an issue. - * - * @param params - The parameters required to fetch and handle issues. - * @param streamlinedComments - A record of streamlined comments indexed by keys. 
- * @param alreadySeen - A set of keys that have already been processed to avoid duplication. - * @returns A promise that resolves when the issue has been handled. - */ export async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen: Set) { if (alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { return; @@ -22,17 +14,6 @@ export async function handleIssue(params: FetchParams, streamlinedComments: Reco return mergeStreamlinedComments(streamlinedComments, streamlined); } -/** - * Handles the processing of a specification or body text. - * - * @param params - The parameters required to fetch and handle issues. - * @param specOrBody - The specification or body text to be processed. - * @param specAndBodies - A record of specifications and bodies indexed by keys. - * @param key - The key associated with the current specification or body. - * @param seen - A set of keys that have already been processed to avoid duplication. - * @param streamlinedComments - A record of streamlined comments indexed by keys. - * @returns A promise that resolves to the updated record of specifications and bodies. - */ export async function handleSpec( params: FetchParams, specOrBody: string, @@ -42,7 +23,7 @@ export async function handleSpec( streamlinedComments: Record ) { specAndBodies[key] = specOrBody; - const otherReferences = idIssueFromComment(specOrBody); + const otherReferences = idIssueFromComment(specOrBody, params); if (otherReferences) { for (const ref of otherReferences) { const anotherKey = createKey(ref.url, ref.issueNumber); @@ -73,21 +54,13 @@ export async function handleSpec( return specAndBodies; } -/** - * Handles the processing of a comment. - * - * @param params - The parameters required to fetch and handle issues. - * @param comment - The comment to be processed. - * @param streamlinedComments - A record of streamlined comments indexed by keys. 
- * @param seen - A set of keys that have already been processed to avoid duplication. - */ export async function handleComment( params: FetchParams, comment: StreamlinedComment, streamlinedComments: Record, seen: Set ) { - const otherReferences = idIssueFromComment(comment.body); + const otherReferences = idIssueFromComment(comment.body, params); if (otherReferences) { for (const ref of otherReferences) { const key = createKey(ref.url); @@ -100,15 +73,8 @@ export async function handleComment( } } -/** - * Handles the processing of specification and body keys. - * - * @param keys - An array of keys representing issues or comments to be processed. - * @param params - The parameters required to fetch and handle issues. - * @param streamlinedComments - A record of streamlined comments indexed by keys. - * @param seen - A set of keys that have already been processed to avoid duplication. - */ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { + // Make one last sweep just to be sure we have everything const commentProcessingPromises = keys.map(async (key) => { let comments = streamlinedComments[key]; if (!comments || comments.length === 0) { @@ -122,12 +88,6 @@ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, await throttlePromises(commentProcessingPromises, 10); } -/** - * Throttles the execution of promises to ensure that no more than the specified limit are running concurrently. - * - * @param promises - An array of promises to be executed. - * @param limit - The maximum number of promises to run concurrently. 
- */ export async function throttlePromises(promises: Promise[], limit: number) { const executing: Promise[] = []; for (const promise of promises) { diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index b63b7d5..2c4697b 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -58,8 +58,8 @@ export function splitKey(key: string): [string, string, string] { * @param params - Additional parameters that may include context information. * @returns An array of linked issues or null if no issues are found. */ -export function idIssueFromComment(comment?: string | null): LinkedIssues[] | null { - const urlMatch = comment?.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+)\/(pull|issues?)\/(\d+)/g); +export function idIssueFromComment(comment?: string | null, params?: FetchParams): LinkedIssues[] | null { + const urlMatch = comment?.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/g); const response: LinkedIssues[] = []; if (urlMatch) { @@ -68,6 +68,24 @@ export function idIssueFromComment(comment?: string | null): LinkedIssues[] | nu }); } + /** + * These can only reference issues within the same repository + * so params works here + */ + const hashMatch = comment?.match(/#(\d+)/g); + if (hashMatch && hashMatch.length > 0) { + hashMatch.forEach((hash) => { + const issueNumber = hash.replace("#", ""); + // the HTML comment in the PR template + if (issueNumber === "1234" && comment?.includes("You must link the issue number e.g.")) { + return; + } + const owner = params?.context.payload.repository?.owner?.login || ""; + const repo = params?.context.payload.repository?.name || ""; + response.push({ body: undefined, owner, repo, issueNumber: parseInt(issueNumber), url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` }); + }); + } + return response.length > 0 ? 
+  logger.info(`${owner.login}/${name}#${number} - ${action}`);
+    issue_number: issueNumber,
sender: { login: prAsIssue.user?.login } as Context<"issue_comment.created">["payload"]["sender"], + repository: { owner: { login: owner }, name: repo } as Context<"issue_comment.created">["payload"]["repository"], + } as Context<"issue_comment.created">["payload"], + }, + streamlinedComments, + specAndBodies + ); + + return formattedChat.join(""); +} diff --git a/src/helpers/pull-helpers/get-task-spec.ts b/src/helpers/pull-helpers/get-task-spec.ts new file mode 100644 index 0000000..06e4618 --- /dev/null +++ b/src/helpers/pull-helpers/get-task-spec.ts @@ -0,0 +1,61 @@ +import { Context } from "../../types"; +import { getOwnerRepoIssueNumberFromUrl } from "../get-owner-repo-issue-from-url"; +import { checkIfPrClosesIssues } from "../gql-functions"; +import { fetchIssue } from "../issue-fetching"; +import { getContextIfNoSpecFound } from "./get-context-if-no-spec"; + +export async function getTaskSpecFromPullRequest( + context: Context<"pull_request.opened" | "pull_request.ready_for_review">, + repoOwner: string, + repoName: string, + fallbackToConvo: boolean = false +) { + const { + payload: { pull_request }, + logger, + } = context; + let taskSpec; + let owner, repo, issueNumber; + + const { issues: closingIssues } = await checkIfPrClosesIssues(context.octokit, { + owner: pull_request.base.repo.owner.login, + repo: pull_request.base.repo.name, + pr_number: pull_request.number, + }); + + if (closingIssues.length === 0) { + const linkedViaBodyHash = pull_request.body?.match(/#(\d+)/g); + const urlMatch = getOwnerRepoIssueNumberFromUrl(pull_request.body); + + if (linkedViaBodyHash?.length) { + const issueNumber = linkedViaBodyHash[0].replace("#", ""); + const issue = await fetchIssue({ context, owner: repoOwner, repo: repoName, issueNum: Number(issueNumber) }); + taskSpec = issue?.body; + } + + if (urlMatch && !taskSpec) { + owner = urlMatch.owner; + repo = urlMatch.repo; + issueNumber = urlMatch.issueNumber; + const issue = await fetchIssue({ context, owner, repo, 
+  if (!taskSpec && fallbackToConvo) {
+    taskSpec = await getContextIfNoSpecFound(context, repoOwner, repoName, pull_request.number);
+  }
+
+  if (!taskSpec) {
+    throw logger.error("Task spec not found", { pull_request });
+  }
review.user?.login) + .filter((login): login is string => !!login); + + return reviewers?.some((reviewer) => usersThatConvertedToDraft.includes(reviewer) || usersThatReadiedForReview.includes(reviewer)); +} diff --git a/src/helpers/pull-helpers/submit-code-review.ts b/src/helpers/pull-helpers/submit-code-review.ts new file mode 100644 index 0000000..8995492 --- /dev/null +++ b/src/helpers/pull-helpers/submit-code-review.ts @@ -0,0 +1,24 @@ +import { Context } from "../../types"; +import { CodeReviewStatus } from "../../types/pull-requests"; + +export async function submitCodeReview(context: Context<"pull_request.opened" | "pull_request.ready_for_review">, review: string, status: CodeReviewStatus) { + const { logger, payload } = context; + const { number, sender, organization, repository, action } = payload; + const { owner, name } = repository; + + logger.info(`${organization}/${repository}#${number} - ${action} - ${sender.login} - ${review}`); + + try { + const response = await context.octokit.pulls.createReview({ + owner: owner.login, + repo: name, + pull_number: number, + body: review, + event: status, + }); + + logger.info(`Code review submitted: ${response.data.html_url}`); + } catch (er) { + throw logger.error("Failed to submit code review", { err: er }); + } +} diff --git a/src/helpers/pull-request-parsing.ts b/src/helpers/pull-request-parsing.ts new file mode 100644 index 0000000..87a86e6 --- /dev/null +++ b/src/helpers/pull-request-parsing.ts @@ -0,0 +1,104 @@ +import { encode } from "gpt-tokenizer"; +import { TokenLimits } from "../types/llm"; +import { logger } from "./errors"; +import { EncodeOptions } from "gpt-tokenizer/esm/GptEncoding"; + +export async function processPullRequestDiff(diff: string, tokenLimits: TokenLimits) { + const { runningTokenCount, tokensRemaining } = tokenLimits; + + // parse the diff into per-file diffs for quicker processing + const perFileDiffs = parsePerFileDiffs(diff); + + // quick estimate using a simple heuristic; 3.5 
characters per token + const estimatedFileDiffStats = perFileDiffs.map(({ filename, diffContent }) => { + const estimatedTokenCount = Math.ceil(diffContent.length / 3.5); + return { filename, estimatedTokenCount, diffContent }; + }); + + estimatedFileDiffStats.sort((a, b) => a.estimatedTokenCount - b.estimatedTokenCount); // Smallest first + + let currentTokenCount = runningTokenCount; + const includedFileDiffs = []; + + // Using the quick estimate, include as many files as possible without exceeding token limits + for (const file of estimatedFileDiffStats) { + if (currentTokenCount + file.estimatedTokenCount > tokensRemaining) { + logger.info(`Skipping ${file.filename} to stay within token limits.`); + continue; + } + includedFileDiffs.push(file); + currentTokenCount += file.estimatedTokenCount; + } + + // If no files can be included, return null + if (includedFileDiffs.length === 0) { + logger.error(`Cannot include any files from diff without exceeding token limits.`); + return { diff: null }; + } + + // Accurately calculate token count for included files we have approximated to be under the limit + const accurateFileDiffStats = await Promise.all( + includedFileDiffs.map(async (file) => { + const tokenCountArray = await encodeAsync(file.diffContent, { disallowedSpecial: new Set() }); + const tokenCount = tokenCountArray.length; + return { ...file, tokenCount }; + }) + ); + + // Take an accurate reading of our current collection of files within the diff + currentTokenCount = accurateFileDiffStats.reduce((sum, file) => sum + file.tokenCount, runningTokenCount); + + // Remove files from the end of the list until we are within token limits + while (currentTokenCount > tokensRemaining && accurateFileDiffStats.length > 0) { + const removedFile = accurateFileDiffStats.pop(); + currentTokenCount -= removedFile?.tokenCount || 0; + logger.info(`Excluded ${removedFile?.filename || "Unknown filename"} after accurate token count exceeded limits.`); + } + + if 
(accurateFileDiffStats.length === 0) { + logger.error(`Cannot include any files from diff after accurate token count calculation.`); + return { diff: null }; + } + + // Build the diff with the included files + const currentDiff = accurateFileDiffStats.map((file) => file.diffContent).join("\n"); + + return { diff: currentDiff }; +} + +// Helper to speed up tokenization +export async function encodeAsync(text: string, options: EncodeOptions): Promise { + return new Promise((resolve) => { + const result = encode(text, options); + resolve(result); + }); +} + +// Helper to parse a diff into per-file diffs +export function parsePerFileDiffs(diff: string): { filename: string; diffContent: string }[] { + // regex to capture diff sections, including the last file + const diffPattern = /^diff --git a\/(.*?) b\/.*$/gm; + let match: RegExpExecArray | null; + const perFileDiffs = []; + let lastIndex = 0; + + // iterate over each file in the diff + while ((match = diffPattern.exec(diff)) !== null) { + const filename = match[1]; + const startIndex = match.index; + + // if we have pushed a file into the array, "append" the diff content + if (perFileDiffs.length > 0) { + perFileDiffs[perFileDiffs.length - 1].diffContent = diff.substring(lastIndex, startIndex).trim(); + } + + perFileDiffs.push({ filename, diffContent: "" }); + lastIndex = startIndex; + } + // append the last file's diff content + if (perFileDiffs.length > 0 && lastIndex < diff.length) { + perFileDiffs[perFileDiffs.length - 1].diffContent = diff.substring(lastIndex).trim(); + } + + return perFileDiffs; +} diff --git a/src/plugin.ts b/src/plugin.ts index 284b3cf..3bc13c8 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,13 +1,13 @@ import { Octokit } from "@octokit/rest"; import { PluginInputs } from "./types"; import { Context } from "./types"; -import { LogLevel, Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Env } from "./types/env"; import { createAdapters } from "./adapters"; import { createClient } 
from "@supabase/supabase-js"; import { VoyageAIClient } from "voyageai"; import OpenAI from "openai"; import { proxyCallbacks } from "./helpers/callback-proxy"; +import { logger } from "./helpers/errors"; export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); @@ -26,7 +26,7 @@ export async function plugin(inputs: PluginInputs, env: Env) { config: inputs.settings, octokit, env, - logger: new Logs("info" as LogLevel), + logger, adapters: {} as ReturnType, }; context.adapters = createAdapters(supabase, voyageClient, openaiClient, context); diff --git a/src/types/context.ts b/src/types/context.ts index 8588ad9..1cde31c 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -5,7 +5,7 @@ import { Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Env } from "./env"; import { createAdapters } from "../adapters"; -export type SupportedEventsU = "issue_comment.created"; +export type SupportedEventsU = "issue_comment.created" | "pull_request.opened" | "pull_request.ready_for_review" | "pull_request.converted_to_draft"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; diff --git a/src/types/github-types.ts b/src/types/github-types.ts index 2830da7..b5692de 100644 --- a/src/types/github-types.ts +++ b/src/types/github-types.ts @@ -19,12 +19,12 @@ export type LinkedIssues = { owner: string; url: string; comments?: SimplifiedComment[] | null | undefined; - body: string | undefined; + body: string | undefined | null; }; export type SimplifiedComment = { user: Partial | null; - body: string | undefined; + body: string | undefined | null; id: string; org: string; repo: string; @@ -40,21 +40,3 @@ export type FetchedCodes = { repo: string; issueNumber: number; }; - -export type FetchedPulls = { - number: number; - title: string; - state: string; - merged: boolean; - url: string; -}; - -export type LinkedPullsToIssue = { - repository: { - issue: { - closedByPullRequestsReferences: { - nodes: FetchedPulls[]; - }; - }; - }; -}; diff --git a/src/types/llm.ts b/src/types/llm.ts index f01a70d..7d5bedf 100644 --- a/src/types/llm.ts +++ b/src/types/llm.ts @@ -4,8 +4,8 @@ export type ModelApplications = "code-review" | "chat-bot"; type ChatBotAppParams = { languages: [string, number][]; - dependencies: Record; - devDependencies: Record; + dependencies: Record | null; + devDependencies: Record | null; }; type CodeReviewAppParams = { @@ -51,3 +51,10 @@ export type StreamlinedComments = { org: string; comments: StreamlinedComment[]; }; + +export type TokenLimits = { + modelMaxTokenLimit: number; + maxCompletionTokens: number; + runningTokenCount: number; + tokensRemaining: number; +}; diff --git a/src/types/pull-requests.ts b/src/types/pull-requests.ts new file mode 100644 index 0000000..9b176b3 --- /dev/null +++ b/src/types/pull-requests.ts @@ -0,0 +1 @@ +export type CodeReviewStatus = "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; diff --git a/tests/__mocks__/handlers.ts b/tests/__mocks__/handlers.ts index be7ba62..2c2141b 100644 --- a/tests/__mocks__/handlers.ts +++ b/tests/__mocks__/handlers.ts @@ -85,9 +85,7 @@ export const 
handlers = [ db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pullNumber) } } }) ) ), - http.get("https://api.github.com/repos/:owner/:repo/languages", ({ params: { owner, repo } }) => - HttpResponse.json(db.repo.findFirst({ where: { owner: { login: { equals: owner as string } }, name: { equals: repo as string } } })) - ), + http.get("https://api.github.com/repos/:owner/:repo/languages", () => HttpResponse.json(["JavaScript", "TypeScript", "Python"])), http.get("https://api.github.com/repos/:owner/:repo/contents/:path", () => HttpResponse.json({ type: "file", @@ -97,4 +95,19 @@ export const handlers = [ content: Buffer.from(JSON.stringify({ content: "This is a mock README file" })).toString("base64"), }) ), + // [MSW] Warning: intercepted a request without a matching request handler: + + // • GET https://api.github.com/repos/ubiquity/test-repo/pulls/3/files?per_page=100?per_page=100 + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/files", () => + HttpResponse.json([ + { + sha: "abc123", + filename: "file1.txt", + status: "modified", + additions: 10, + deletions: 5, + changes: 15, + }, + ]) + ), ]; diff --git a/tests/main.test.ts b/tests/main.test.ts index 3ceaeb5..701b5b0 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -2,7 +2,6 @@ import { db } from "./__mocks__/db"; import { server } from "./__mocks__/node"; import usersGet from "./__mocks__/users-get.json"; import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it } from "@jest/globals"; -import { Logs } from "@ubiquity-os/ubiquity-os-logger"; import { Context, SupportedEventsU } from "../src/types"; import { drop } from "@mswjs/data"; import issueTemplate from "./__mocks__/issue-template"; @@ -11,7 +10,9 @@ import { askQuestion } from "../src/handlers/ask-llm"; import { runPlugin } from "../src/plugin"; import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; import { 
+import { ResponseFromLlm } from "../src/adapters/openai/helpers/completions";
runPlugin(ctx); - - expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); - - const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === + const prompt = `=== Current Task Specification === ubiquity/test-repo/1 === This is a demo spec for a demo task just perfect for testing. - === End Current Issue #1 Specification === - === Current Issue #1 Conversation === ubiquity/test-repo #1 === + === End Current Task Specification === ubiquity/test-repo/1 === + + === Current Task Conversation === ubiquity/test-repo/1 === 1 ubiquity: ${ISSUE_ID_2_CONTENT} [#2](https://www.github.com/ubiquity/test-repo/issues/2) 2 ubiquity: ${TEST_QUESTION} [#1](https://www.github.com/ubiquity/test-repo/issues/1) - === End Current Issue #1 Conversation === + === End Current Task Conversation === ubiquity/test-repo/1 === - === Linked Issue #2 Specification === ubiquity/test-repo/2 === + === README === ubiquity/test-repo/1 === + + {"content":"This is a mock README file"} + + === End README === ubiquity/test-repo/1 === + + === Linked Task Specification === ubiquity/test-repo/2 === Related to issue #3 - === End Linked Issue #2 Specification === + === End Linked Task Specification === ubiquity/test-repo/2 === - === Linked Issue #2 Conversation === ubiquity/test-repo #2 === + === Linked Task Conversation === ubiquity/test-repo/2 === 3 ubiquity: ${ISSUE_ID_3_CONTENT} [#3](https://www.github.com/ubiquity/test-repo/issues/3) - === End Linked Issue #2 Conversation === + === End Linked Task Conversation === ubiquity/test-repo/2 === - === Linked Issue #3 Specification === ubiquity/test-repo/3 === + === Linked Task Specification === ubiquity/test-repo/3 === Just another issue - === End Linked Issue #3 Specification === + === End Linked Task Specification === ubiquity/test-repo/3 === - === Linked Issue #3 Conversation === ubiquity/test-repo #3 === + === Linked Task Conversation === ubiquity/test-repo/3 === 4 ubiquity: Just a comment 
[#1](https://www.github.com/ubiquity/test-repo/issues/1) - 4 ubiquity: Just a comment [#1](https://www.github.com/ubiquity/test-repo/issues/1) - === End Linked Issue #3 Conversation ===\n - `; + === End Linked Task Conversation === ubiquity/test-repo/3 ===`; const normalizedExpected = normalizeString(prompt); - const normalizedReceived = normalizeString(infoSpy.mock.calls[1][0] as string); + const normalizedReceived = normalizeString(infoSpy.mock.calls[0][0] as string); expect(normalizedReceived).toEqual(normalizedExpected); + expect(infoSpy).toHaveBeenNthCalledWith(2, "Answer: This is a mock answer for the chat", { + caller: LOG_CALLER, + metadata: { + tokenUsage: { + input: 1000, + output: 150, + total: 1150, + }, + groundTruths: ["This is a mock answer for the chat"], + }, + }); }); }); @@ -256,17 +271,17 @@ function createContext(body = TEST_SLASH_COMMAND) { const user = db.users.findFirst({ where: { id: { equals: 1 } } }); return { payload: { - issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["issue"], + issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context<"issue_comment.created">["payload"]["issue"], sender: user, - repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["repository"], - comment: { body, user: user } as unknown as Context["payload"]["comment"], + repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context<"issue_comment.created">["payload"]["repository"], + comment: { body, user: user } as unknown as Context<"issue_comment.created">["payload"]["comment"], action: "created" as string, - installation: { id: 1 } as unknown as Context["payload"]["installation"], - organization: { login: "ubiquity" } as unknown as Context["payload"]["organization"], + installation: { id: 1 } as unknown as Context<"issue_comment.created">["payload"]["installation"], + organization: { login: "ubiquity" } as unknown as 
Context<"issue_comment.created">["payload"]["organization"], }, owner: "ubiquity", repo: "test-repo", - logger: new Logs("debug"), + logger: logger, config: {}, env: { UBIQUITY_OS_APP_NAME: "UbiquityOS", @@ -391,7 +406,13 @@ function createContext(body = TEST_SLASH_COMMAND) { }, openai: { completions: { - createCompletion: async (): Promise => { + getModelMaxTokenLimit: () => { + return 50000; + }, + getModelMaxOutputLimit: () => { + return 50000; + }, + createCompletion: async (): Promise => { return { answer: MOCK_ANSWER, groundTruths: [MOCK_ANSWER], @@ -413,5 +434,5 @@ function createContext(body = TEST_SLASH_COMMAND) { }, octokit: new octokit.Octokit(), eventName: "issue_comment.created" as SupportedEventsU, - } as unknown as Context; + } as unknown as Context<"issue_comment.created">; } diff --git a/tsconfig.json b/tsconfig.json index ba9d6b3..add1e99 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -9,8 +9,8 @@ // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ /* Language and Environment */ - "target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + "lib": ["ESNext"] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, // "jsx": "preserve", /* Specify what JSX code is generated. */ // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. 
*/ diff --git a/yarn.lock b/yarn.lock index ad622f5..a41885f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,6 +2,14 @@ # yarn lockfile v1 +"@actions/core@1.10.1": + version "1.10.1" + resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.1.tgz#61108e7ac40acae95ee36da074fa5850ca4ced8a" + integrity sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g== + dependencies: + "@actions/http-client" "^2.0.1" + uuid "^8.3.2" + "@actions/core@^1.11.1": version "1.11.1" resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.11.1.tgz#ae683aac5112438021588030efb53b1adb86f172" @@ -17,7 +25,7 @@ dependencies: "@actions/io" "^1.0.1" -"@actions/github@^6.0.0": +"@actions/github@6.0.0", "@actions/github@^6.0.0": version "6.0.0" resolved "https://registry.yarnpkg.com/@actions/github/-/github-6.0.0.tgz#65883433f9d81521b782a64cc1fd45eef2191ea7" integrity sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g== @@ -364,6 +372,11 @@ "@types/tough-cookie" "^4.0.5" tough-cookie "^4.1.4" +"@cfworker/json-schema@2.0.1": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@cfworker/json-schema/-/json-schema-2.0.1.tgz#563463393a1f19b06732491e604e0cc8255baf8a" + integrity sha512-1w7xVrTFjAWBVaOWRH5AMdKpJdltF4iy/d93E7qj8Rox6yY9OzEW1aC7T5eONrDOxXrlnsclPw9v24XW2c0mkg== + "@cloudflare/kv-asset-handler@0.3.4": version "0.3.4" resolved "https://registry.yarnpkg.com/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.3.4.tgz#5cc152847c8ae4d280ec5d7f4f6ba8c976b585c3" @@ -931,7 +944,7 @@ resolved "https://registry.yarnpkg.com/@cspell/url/-/url-8.9.0.tgz#313ccde44570b3158cb7baa3eb53e54572d7263f" integrity sha512-FaHTEx6OBVKlkX7VgAPofBZ5vIdxNWYalb0uZwJ5FCc/PCMIF5l91DQGQxRMas3qzRACR911kJamPdeK/3qilw== -"@cspotcode/source-map-support@0.8.1": +"@cspotcode/source-map-support@0.8.1", "@cspotcode/source-map-support@^0.8.0": version "0.8.1" resolved 
"https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== @@ -979,11 +992,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== -"@esbuild/aix-ppc64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz#b57697945b50e99007b4c2521507dc613d4a648c" - integrity sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw== - "@esbuild/android-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd" @@ -994,11 +1002,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== -"@esbuild/android-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz#1add7e0af67acefd556e407f8497e81fddad79c0" - integrity sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w== - "@esbuild/android-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d" @@ -1009,11 +1012,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== -"@esbuild/android-arm@0.24.0": - version "0.24.0" - 
resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.24.0.tgz#ab7263045fa8e090833a8e3c393b60d59a789810" - integrity sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew== - "@esbuild/android-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1" @@ -1024,11 +1022,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== -"@esbuild/android-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.24.0.tgz#e8f8b196cfdfdd5aeaebbdb0110983460440e705" - integrity sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ== - "@esbuild/darwin-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276" @@ -1039,11 +1032,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== -"@esbuild/darwin-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz#2d0d9414f2acbffd2d86e98253914fca603a53dd" - integrity sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw== - "@esbuild/darwin-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb" @@ -1054,11 +1042,6 @@ resolved 
"https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== -"@esbuild/darwin-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz#33087aab31a1eb64c89daf3d2cf8ce1775656107" - integrity sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA== - "@esbuild/freebsd-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2" @@ -1069,11 +1052,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== -"@esbuild/freebsd-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz#bb76e5ea9e97fa3c753472f19421075d3a33e8a7" - integrity sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA== - "@esbuild/freebsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4" @@ -1084,11 +1062,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== -"@esbuild/freebsd-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz#e0e2ce9249fdf6ee29e5dc3d420c7007fa579b93" - integrity sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ== - 
"@esbuild/linux-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb" @@ -1099,11 +1072,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== -"@esbuild/linux-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz#d1b2aa58085f73ecf45533c07c82d81235388e75" - integrity sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g== - "@esbuild/linux-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a" @@ -1114,11 +1082,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== -"@esbuild/linux-arm@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz#8e4915df8ea3e12b690a057e77a47b1d5935ef6d" - integrity sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw== - "@esbuild/linux-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a" @@ -1129,11 +1092,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== -"@esbuild/linux-ia32@0.24.0": - version "0.24.0" - resolved 
"https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz#8200b1110666c39ab316572324b7af63d82013fb" - integrity sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA== - "@esbuild/linux-loong64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72" @@ -1144,11 +1102,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== -"@esbuild/linux-loong64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz#6ff0c99cf647504df321d0640f0d32e557da745c" - integrity sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g== - "@esbuild/linux-mips64el@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289" @@ -1159,11 +1112,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== -"@esbuild/linux-mips64el@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz#3f720ccd4d59bfeb4c2ce276a46b77ad380fa1f3" - integrity sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA== - "@esbuild/linux-ppc64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7" @@ -1174,11 +1122,6 @@ resolved 
"https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== -"@esbuild/linux-ppc64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz#9d6b188b15c25afd2e213474bf5f31e42e3aa09e" - integrity sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ== - "@esbuild/linux-riscv64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09" @@ -1189,11 +1132,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== -"@esbuild/linux-riscv64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz#f989fdc9752dfda286c9cd87c46248e4dfecbc25" - integrity sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw== - "@esbuild/linux-s390x@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829" @@ -1204,11 +1142,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== -"@esbuild/linux-s390x@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz#29ebf87e4132ea659c1489fce63cd8509d1c7319" - integrity sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g== - 
"@esbuild/linux-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4" @@ -1219,11 +1152,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== -"@esbuild/linux-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz#4af48c5c0479569b1f359ffbce22d15f261c0cef" - integrity sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA== - "@esbuild/netbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462" @@ -1234,16 +1162,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== -"@esbuild/netbsd-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz#1ae73d23cc044a0ebd4f198334416fb26c31366c" - integrity sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg== - -"@esbuild/openbsd-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz#5d904a4f5158c89859fd902c427f96d6a9e632e2" - integrity sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg== - "@esbuild/openbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691" @@ -1254,11 +1172,6 @@ resolved 
"https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== -"@esbuild/openbsd-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz#4c8aa88c49187c601bae2971e71c6dc5e0ad1cdf" - integrity sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q== - "@esbuild/sunos-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273" @@ -1269,11 +1182,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== -"@esbuild/sunos-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz#8ddc35a0ea38575fa44eda30a5ee01ae2fa54dd4" - integrity sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA== - "@esbuild/win32-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f" @@ -1284,11 +1192,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== -"@esbuild/win32-arm64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz#6e79c8543f282c4539db684a207ae0e174a9007b" - integrity sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA== - "@esbuild/win32-ia32@0.17.19": version "0.17.19" 
resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03" @@ -1299,11 +1202,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== -"@esbuild/win32-ia32@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz#057af345da256b7192d18b676a02e95d0fa39103" - integrity sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw== - "@esbuild/win32-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061" @@ -1314,11 +1212,6 @@ resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== -"@esbuild/win32-x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz#168ab1c7e1c318b922637fad8f339d48b01e1244" - integrity sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA== - "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -1748,23 +1641,10 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@octokit/app@^15.0.0": - version "15.1.0" - resolved "https://registry.yarnpkg.com/@octokit/app/-/app-15.1.0.tgz#b330d8826be088ec8d1d43a59dc27ef57d1232b2" - integrity sha512-TkBr7QgOmE6ORxvIAhDbZsqPkF7RSqTY4pLTtUQCvr6dTXqvi2fFo46q3h1lxlk/sGMQjqyZ0kEahkD/NyzOHg== - dependencies: - "@octokit/auth-app" "^7.0.0" - 
"@octokit/auth-unauthenticated" "^6.0.0" - "@octokit/core" "^6.1.2" - "@octokit/oauth-app" "^7.0.0" - "@octokit/plugin-paginate-rest" "^11.0.0" - "@octokit/types" "^13.0.0" - "@octokit/webhooks" "^13.0.0" - -"@octokit/auth-app@^7.0.0": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@octokit/auth-app/-/auth-app-7.1.1.tgz#d8916ad01e6ffb0a0a50507aa613e91fe7a49b93" - integrity sha512-kRAd6yelV9OgvlEJE88H0VLlQdZcag9UlLr7dV0YYP37X8PPDvhgiTy66QVhDXdyoT0AleFN2w/qXkPdrSzINg== +"@octokit/auth-app@7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@octokit/auth-app/-/auth-app-7.1.0.tgz#55a3d3b3b3607b9d375abbe946163dca3a25c2c9" + integrity sha512-cazGaJPSgeZ8NkVYeM/C5l/6IQ5vZnsI8p1aMucadCkt/bndI+q+VqwrlnWbASRmenjOkf1t1RpCKrif53U8gw== dependencies: "@octokit/auth-oauth-app" "^8.1.0" "@octokit/auth-oauth-user" "^5.1.0" @@ -1775,7 +1655,7 @@ universal-github-app-jwt "^2.2.0" universal-user-agent "^7.0.0" -"@octokit/auth-oauth-app@^8.0.0", "@octokit/auth-oauth-app@^8.1.0": +"@octokit/auth-oauth-app@^8.1.0": version "8.1.1" resolved "https://registry.yarnpkg.com/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.1.tgz#6204affa6e86f535016799cadf2af9befe5e893c" integrity sha512-5UtmxXAvU2wfcHIPPDWzVSAWXVJzG3NWsxb7zCFplCWEmMCArSZV0UQu5jw5goLQXbFyOr5onzEH37UJB3zQQg== @@ -1817,13 +1697,18 @@ resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-5.1.1.tgz#3bbfe905111332a17f72d80bd0b51a3e2fa2cf07" integrity sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA== -"@octokit/auth-unauthenticated@^6.0.0", "@octokit/auth-unauthenticated@^6.0.0-beta.1": - version "6.1.0" - resolved "https://registry.yarnpkg.com/@octokit/auth-unauthenticated/-/auth-unauthenticated-6.1.0.tgz#de0fe923bb06ed93aea526ab99972a98c546d0bf" - integrity sha512-zPSmfrUAcspZH/lOFQnVnvjQZsIvmfApQH6GzJrkIunDooU1Su2qt2FfMTSVPRp7WLTQyC20Kd55lF+mIYaohQ== +"@octokit/core@6.1.2", "@octokit/core@^6.1.2": + version "6.1.2" + resolved 
"https://registry.yarnpkg.com/@octokit/core/-/core-6.1.2.tgz#20442d0a97c411612da206411e356014d1d1bd17" + integrity sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg== dependencies: + "@octokit/auth-token" "^5.0.0" + "@octokit/graphql" "^8.0.0" + "@octokit/request" "^9.0.0" "@octokit/request-error" "^6.0.1" "@octokit/types" "^13.0.0" + before-after-hook "^3.0.2" + universal-user-agent "^7.0.0" "@octokit/core@^5.0.1", "@octokit/core@^5.0.2": version "5.2.0" @@ -1838,19 +1723,6 @@ before-after-hook "^2.2.0" universal-user-agent "^6.0.0" -"@octokit/core@^6.0.0", "@octokit/core@^6.1.2": - version "6.1.2" - resolved "https://registry.yarnpkg.com/@octokit/core/-/core-6.1.2.tgz#20442d0a97c411612da206411e356014d1d1bd17" - integrity sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg== - dependencies: - "@octokit/auth-token" "^5.0.0" - "@octokit/graphql" "^8.0.0" - "@octokit/request" "^9.0.0" - "@octokit/request-error" "^6.0.1" - "@octokit/types" "^13.0.0" - before-after-hook "^3.0.2" - universal-user-agent "^7.0.0" - "@octokit/endpoint@^10.0.0": version "10.1.1" resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-10.1.1.tgz#1a9694e7aef6aa9d854dc78dd062945945869bcc" @@ -1867,6 +1739,14 @@ "@octokit/types" "^13.1.0" universal-user-agent "^6.0.0" +"@octokit/graphql-schema@^15.25.0": + version "15.25.0" + resolved "https://registry.yarnpkg.com/@octokit/graphql-schema/-/graphql-schema-15.25.0.tgz#30bb8ecc494c249650991b33f2f0d9332dbe87e9" + integrity sha512-aqz9WECtdxVWSqgKroUu9uu+CRt5KnfErWs0dBPKlTdrreAeWzS5NRu22ZVcGdPP7s3XDg2Gnf5iyoZPCRZWmQ== + dependencies: + graphql "^16.0.0" + graphql-tag "^2.10.3" + "@octokit/graphql@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-7.1.0.tgz#9bc1c5de92f026648131f04101cab949eeffe4e0" @@ -1885,20 +1765,6 @@ "@octokit/types" "^13.0.0" universal-user-agent "^7.0.0" -"@octokit/oauth-app@^7.0.0": - 
version "7.1.3" - resolved "https://registry.yarnpkg.com/@octokit/oauth-app/-/oauth-app-7.1.3.tgz#a0f256dd185e7c00bfbc3e6bc3c5aad66e42c609" - integrity sha512-EHXbOpBkSGVVGF1W+NLMmsnSsJRkcrnVmDKt0TQYRBb6xWfWzoi9sBD4DIqZ8jGhOWO/V8t4fqFyJ4vDQDn9bg== - dependencies: - "@octokit/auth-oauth-app" "^8.0.0" - "@octokit/auth-oauth-user" "^5.0.1" - "@octokit/auth-unauthenticated" "^6.0.0-beta.1" - "@octokit/core" "^6.0.0" - "@octokit/oauth-authorization-url" "^7.0.0" - "@octokit/oauth-methods" "^5.0.0" - "@types/aws-lambda" "^8.10.83" - universal-user-agent "^7.0.0" - "@octokit/oauth-authorization-url@^7.0.0": version "7.1.1" resolved "https://registry.yarnpkg.com/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz#0e17c2225eb66b58ec902d02b6f1315ffe9ff04b" @@ -1934,11 +1800,6 @@ resolved "https://registry.yarnpkg.com/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.3.0.tgz#a7a4da00c0f27f7f5708eb3fcebefa08f8d51125" integrity sha512-vKLsoR4xQxg4Z+6rU/F65ItTUz/EXbD+j/d4mlq2GW8TsA4Tc8Kdma2JTAAJ5hrKWUQzkR/Esn2fjsqiVRYaQg== -"@octokit/plugin-paginate-graphql@^5.0.0": - version "5.2.4" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-5.2.4.tgz#b6afda7b3f24cb93d2ab822ec8eac664a5d325d0" - integrity sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA== - "@octokit/plugin-paginate-rest@11.3.1": version "11.3.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.1.tgz#fe92d04b49f134165d6fbb716e765c2f313ad364" @@ -1946,6 +1807,13 @@ dependencies: "@octokit/types" "^13.5.0" +"@octokit/plugin-paginate-rest@11.3.3": + version "11.3.3" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.3.tgz#efc97ba66aae6797e2807a082f99b9cfc0e05aba" + integrity sha512-o4WRoOJZlKqEEgj+i9CpcmnByvtzoUYC6I8PD2SA95M+BJ2x8h7oLcVOg9qcowWXBOdcTRsMZiwvM3EyLm9AfA== + dependencies: + "@octokit/types" 
"^13.5.0" + "@octokit/plugin-paginate-rest@^11.0.0": version "11.3.5" resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.5.tgz#a1929b3ba3dc7b63bc73bb6d3c7a3faf2a9c7649" @@ -1965,6 +1833,11 @@ resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz#98a3ca96e0b107380664708111864cb96551f958" integrity sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA== +"@octokit/plugin-request-log@^5.3.1": + version "5.3.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-5.3.1.tgz#ccb75d9705de769b2aa82bcd105cc96eb0c00f69" + integrity sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw== + "@octokit/plugin-rest-endpoint-methods@13.2.2": version "13.2.2" resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.2.tgz#af8e5dd2cddfea576f92ffaf9cb84659f302a638" @@ -1972,6 +1845,13 @@ dependencies: "@octokit/types" "^13.5.0" +"@octokit/plugin-rest-endpoint-methods@13.2.4": + version "13.2.4" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.4.tgz#543add032d3fe3f5d2839bfd619cf66d85469f01" + integrity sha512-gusyAVgTrPiuXOdfqOySMDztQHv6928PQ3E4dqVGEtOvRXAKRbJR4b1zQyniIT9waqaWk/UDaoJ2dyPr7Bk7Iw== + dependencies: + "@octokit/types" "^13.5.0" + "@octokit/plugin-rest-endpoint-methods@^10.0.0": version "10.4.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz#41ba478a558b9f554793075b2e20cd2ef973be17" @@ -1986,19 +1866,19 @@ dependencies: "@octokit/types" "^13.6.1" -"@octokit/plugin-retry@^7.0.0": - version "7.1.2" - resolved "https://registry.yarnpkg.com/@octokit/plugin-retry/-/plugin-retry-7.1.2.tgz#242e2d19a72a50b5113bb25d7d2c622ce0373fa0" - integrity 
sha512-XOWnPpH2kJ5VTwozsxGurw+svB2e61aWlmk5EVIYZPwFK5F9h4cyPyj9CIKRyMXMHSwpIsI3mPOdpMmrRhe7UQ== +"@octokit/plugin-retry@7.1.1": + version "7.1.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-retry/-/plugin-retry-7.1.1.tgz#a84483e4afdd068dd71da81abe206a9e442c1288" + integrity sha512-G9Ue+x2odcb8E1XIPhaFBnTTIrrUDfXN05iFXiqhR+SeeeDMMILcAnysOsxUpEWcQp2e5Ft397FCXTcPkiPkLw== dependencies: "@octokit/request-error" "^6.0.0" "@octokit/types" "^13.0.0" bottleneck "^2.15.3" -"@octokit/plugin-throttling@^9.0.0": - version "9.3.2" - resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-9.3.2.tgz#cc05180e45e769d6726c5faed157e9ad3b6ab8c0" - integrity sha512-FqpvcTpIWFpMMwIeSoypoJXysSAQ3R+ALJhXXSG1HTP3YZOIeLmcNcimKaXxTcws+Sh6yoRl13SJ5r8sXc1Fhw== +"@octokit/plugin-throttling@9.3.1": + version "9.3.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-9.3.1.tgz#5648165e1e70e861625f3a16af6c55cafe861061" + integrity sha512-Qd91H4liUBhwLB2h6jZ99bsxoQdhgPk6TdwnClPyTBSDAdviGPceViEgUwj+pcQDmB/rfAXAXK7MTochpHM3yQ== dependencies: "@octokit/types" "^13.0.0" bottleneck "^2.15.3" @@ -2056,6 +1936,16 @@ "@octokit/plugin-request-log" "^4.0.0" "@octokit/plugin-rest-endpoint-methods" "13.2.2" +"@octokit/rest@^21.0.2": + version "21.0.2" + resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-21.0.2.tgz#9b767dbc1098daea8310fd8b76bf7a97215d5972" + integrity sha512-+CiLisCoyWmYicH25y1cDfCrv41kRSvTq6pPWtRroRJzhsCZWZyCqGyI8foJT5LmScADSwRAnr/xo+eewL04wQ== + dependencies: + "@octokit/core" "^6.1.2" + "@octokit/plugin-paginate-rest" "^11.0.0" + "@octokit/plugin-request-log" "^5.3.1" + "@octokit/plugin-rest-endpoint-methods" "^13.0.0" + "@octokit/types@^12.6.0": version "12.6.0" resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.6.0.tgz#8100fb9eeedfe083aae66473bd97b15b62aedcb2" @@ -2082,6 +1972,11 @@ resolved 
"https://registry.yarnpkg.com/@octokit/webhooks-methods/-/webhooks-methods-5.1.0.tgz#13b6c08f89902c1ab0ddf31c6eeeec9c2772cfe6" integrity sha512-yFZa3UH11VIxYnnoOYCVoJ3q4ChuSOk2IVBBQ0O3xtKX4x9bmKb/1t+Mxixv2iUhzMdOl1qeWJqEhouXXzB3rQ== +"@octokit/webhooks-types@7.5.1": + version "7.5.1" + resolved "https://registry.yarnpkg.com/@octokit/webhooks-types/-/webhooks-types-7.5.1.tgz#e05399ab6bbbef8b78eb6bfc1a2cb138ea861104" + integrity sha512-1dozxWEP8lKGbtEu7HkRbK1F/nIPuJXNfT0gd96y6d3LcHZTtRtlf8xz3nicSJfesADxJyDh+mWBOsdLkqgzYw== + "@octokit/webhooks@13.2.7": version "13.2.7" resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-13.2.7.tgz#03f89b278cd63f271eba3062f0b75ddd18a82252" @@ -2092,7 +1987,7 @@ "@octokit/webhooks-methods" "^5.0.0" aggregate-error "^5.0.0" -"@octokit/webhooks@^13.0.0": +"@octokit/webhooks@13.3.0": version "13.3.0" resolved "https://registry.yarnpkg.com/@octokit/webhooks/-/webhooks-13.3.0.tgz#fd5d54d47c789c75d60a00eb04e982152d7c654a" integrity sha512-TUkJLtI163Bz5+JK0O+zDkQpn4gKwN+BovclUvCj6pI/6RXrFqQvUMRS2M+Rt8Rv0qR3wjoMoOPmpJKeOh0nBg== @@ -2129,6 +2024,11 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.32.33.tgz#823af450f6f1571a85c12e2b1f2a0b134f61920f" integrity sha512-jM50BfkKA0fwfj0uRRO6asfNfbU0oZipJIb/bL2+BUH/THjuEf2BMiqBOvKfBji5Z9t59NboZQGNfKZbdV50Iw== +"@sinclair/typebox@0.32.35": + version "0.32.35" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.32.35.tgz#41c04473509478df9895800018a3d3ae7d40fb3c" + integrity sha512-Ul3YyOTU++to8cgNkttakC0dWvpERr6RYoHO2W47DLbFvrwBDJUY31B1sImH6JZSYc4Kt4PyHtoPNu+vL2r2dA== + "@sinclair/typebox@^0.27.8": version "0.27.8" resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" @@ -2214,10 +2114,25 @@ "@supabase/realtime-js" "2.10.2" "@supabase/storage-js" "2.7.0" -"@types/aws-lambda@^8.10.83": - version "8.10.145" - resolved 
"https://registry.yarnpkg.com/@types/aws-lambda/-/aws-lambda-8.10.145.tgz#b2d31a987f4888e5553ff1819f57cafa475594d9" - integrity sha512-dtByW6WiFk5W5Jfgz1VM+YPA21xMXTuSFoLYIDY0L44jDLLflVPtZkYuu3/YxpGcvjzKFBZLU+GyKjR0HOYtyw== +"@tsconfig/node10@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.11.tgz#6ee46400685f130e278128c7b38b7e031ff5b2f2" + integrity sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== "@types/babel__core@^7.1.14": version "7.20.5" @@ -2493,6 +2408,33 @@ "@typescript-eslint/types" "7.13.1" eslint-visitor-keys "^3.4.3" +"@ubiquity-os/ubiquity-os-kernel@^2.4.0": + version "2.4.0" + resolved "https://registry.yarnpkg.com/@ubiquity-os/ubiquity-os-kernel/-/ubiquity-os-kernel-2.4.0.tgz#1bb74d4b02ef5ba6b0c1c01d509e3e0f58609904" + integrity sha512-KT8AwtMOHA99GoVUs43eAR2PZii9AHmY9NjOlBtvotB5tXbeEIyhjgHr0kgRncgiLJU1UFIe0QYMmpOvmXiQpg== + dependencies: + "@actions/core" "1.10.1" + "@actions/github" "6.0.0" + "@cfworker/json-schema" "2.0.1" + "@octokit/auth-app" "7.1.0" + "@octokit/core" "6.1.2" + "@octokit/plugin-paginate-rest" "11.3.3" + 
"@octokit/plugin-rest-endpoint-methods" "13.2.4" + "@octokit/plugin-retry" "7.1.1" + "@octokit/plugin-throttling" "9.3.1" + "@octokit/rest" "^21.0.2" + "@octokit/types" "^13.5.0" + "@octokit/webhooks" "13.3.0" + "@octokit/webhooks-types" "7.5.1" + "@sinclair/typebox" "0.32.35" + "@ubiquity-os/ubiquity-os-logger" "^1.3.2" + dotenv "16.4.5" + hono "4.4.13" + smee-client "2.0.1" + ts-node "^10.9.2" + typebox-validators "0.3.5" + yaml "2.4.5" + "@ubiquity-os/ubiquity-os-logger@^1.3.2": version "1.3.2" resolved "https://registry.yarnpkg.com/@ubiquity-os/ubiquity-os-logger/-/ubiquity-os-logger-1.3.2.tgz#4423bc0baeac5c2f73123d15fd961310521163cd" @@ -2518,11 +2460,23 @@ acorn-jsx@^5.3.2: resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== +acorn-walk@^8.1.1: + version "8.3.4" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.4.tgz#794dd169c3977edf4ba4ea47583587c5866236b7" + integrity sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g== + dependencies: + acorn "^8.11.0" + acorn-walk@^8.2.0: version "8.3.2" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa" integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== +acorn@^8.11.0, acorn@^8.4.1: + version "8.13.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.13.0.tgz#2a30d670818ad16ddd6a35d3842dacec9e5d7ca3" + integrity sha512-8zSiw54Oxrdym50NlZ9sUusyO1Z1ZchgRLWRaK6c86XJFClyCgFKetdowBg5bKxyp/u+CDBJG4Mpp0m3HLZl9w== + acorn@^8.12.0: version "8.12.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.0.tgz#1627bfa2e058148036133b8d9b51a700663c294c" @@ -2630,6 +2584,11 @@ anymatch@^3.0.3, anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" +arg@^4.1.0: + version "4.1.3" 
+ resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -3082,7 +3041,7 @@ combined-stream@^1.0.8: dependencies: delayed-stream "~1.0.0" -commander@^12.1.0, commander@~12.1.0: +commander@^12.0.0, commander@^12.1.0, commander@~12.1.0: version "12.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" integrity sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA== @@ -3197,6 +3156,11 @@ create-jest@^29.7.0: jest-util "^29.7.0" prompts "^2.0.1" +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" @@ -3463,6 +3427,11 @@ diff3@0.0.3: resolved "https://registry.yarnpkg.com/diff3/-/diff3-0.0.3.tgz#d4e5c3a4cdf4e5fe1211ab42e693fcb4321580fc" integrity sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g== +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -3477,7 +3446,7 @@ dot-prop@^5.1.0: dependencies: is-obj "^2.0.0" 
-dotenv@^16.3.1, dotenv@^16.4.5: +dotenv@16.4.5, dotenv@^16.4.5: version "16.4.5" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== @@ -3645,36 +3614,6 @@ esbuild@0.17.19: "@esbuild/win32-ia32" "0.17.19" "@esbuild/win32-x64" "0.17.19" -esbuild@^0.24.0: - version "0.24.0" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.24.0.tgz#f2d470596885fcb2e91c21eb3da3b3c89c0b55e7" - integrity sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ== - optionalDependencies: - "@esbuild/aix-ppc64" "0.24.0" - "@esbuild/android-arm" "0.24.0" - "@esbuild/android-arm64" "0.24.0" - "@esbuild/android-x64" "0.24.0" - "@esbuild/darwin-arm64" "0.24.0" - "@esbuild/darwin-x64" "0.24.0" - "@esbuild/freebsd-arm64" "0.24.0" - "@esbuild/freebsd-x64" "0.24.0" - "@esbuild/linux-arm" "0.24.0" - "@esbuild/linux-arm64" "0.24.0" - "@esbuild/linux-ia32" "0.24.0" - "@esbuild/linux-loong64" "0.24.0" - "@esbuild/linux-mips64el" "0.24.0" - "@esbuild/linux-ppc64" "0.24.0" - "@esbuild/linux-riscv64" "0.24.0" - "@esbuild/linux-s390x" "0.24.0" - "@esbuild/linux-x64" "0.24.0" - "@esbuild/netbsd-x64" "0.24.0" - "@esbuild/openbsd-arm64" "0.24.0" - "@esbuild/openbsd-x64" "0.24.0" - "@esbuild/sunos-x64" "0.24.0" - "@esbuild/win32-arm64" "0.24.0" - "@esbuild/win32-ia32" "0.24.0" - "@esbuild/win32-x64" "0.24.0" - esbuild@~0.21.4: version "0.21.5" resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" @@ -3871,6 +3810,11 @@ events@^3.3.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== +eventsource@^2.0.2: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/eventsource/-/eventsource-2.0.2.tgz#76dfcc02930fb2ff339520b6d290da573a9e8508" + integrity sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA== + execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" @@ -4182,15 +4126,6 @@ git-raw-commits@^4.0.0: meow "^12.0.1" split2 "^4.0.0" -github-diff-tool@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/github-diff-tool/-/github-diff-tool-1.0.6.tgz#e633b46397db850ad3dc0d500450357cb7ee26f9" - integrity sha512-DOqKck+WUj3HsfOwef5cjS32qqOkKWFncIl4erBtp2+dfccrkSi6Ee14mKGnrQaAhMrx/9LWFh8X5KGivZVY8A== - dependencies: - dotenv "^16.3.1" - esbuild "^0.24.0" - octokit "^4.0.2" - glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" @@ -4266,11 +4201,6 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -gpt-tokenizer@^2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/gpt-tokenizer/-/gpt-tokenizer-2.5.1.tgz#ff1175b9ae1325f0f5281e9797af078cb29295dc" - integrity sha512-26zNjvGrIf+a6yWg5l2DvNT4LXAmotHyx7IomHVhXiUs62BwKVFLv/l8yRQQrkUDc2XDtzCdjcNuJqzOjxxiPA== - graceful-fs@^4.1.2, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -4281,7 +4211,14 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -graphql@^16.8.1: +graphql-tag@^2.10.3: + version "2.12.6" + resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.6.tgz#d441a569c1d2537ef10ca3d1633b48725329b5f1" + integrity 
sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg== + dependencies: + tslib "^2.1.0" + +graphql@^16.0.0, graphql@^16.8.1: version "16.9.0" resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.9.0.tgz#1c310e63f16a49ce1fbb230bd0a000e99f6f115f" integrity sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw== @@ -4347,6 +4284,11 @@ headers-polyfill@^4.0.2: resolved "https://registry.yarnpkg.com/headers-polyfill/-/headers-polyfill-4.0.3.tgz#922a0155de30ecc1f785bcf04be77844ca95ad07" integrity sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ== +hono@4.4.13: + version "4.4.13" + resolved "https://registry.yarnpkg.com/hono/-/hono-4.4.13.tgz#954e8f6e4bab14f3f9d7bac4eef4c56d23e7f900" + integrity sha512-c6qqenclmQ6wpXzqiElMa2jt423PVCmgBreDfC5s2lPPpGk7d0lOymd8QTzFZyYC5mSSs6imiTMPip+gLwuW/g== + hosted-git-info@^2.1.4: version "2.8.9" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" @@ -5422,7 +5364,7 @@ make-dir@^4.0.0: dependencies: semver "^7.5.3" -make-error@1.x: +make-error@1.x, make-error@^1.1.1: version "1.3.6" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -5716,22 +5658,6 @@ object.assign@^4.1.5: has-symbols "^1.0.3" object-keys "^1.1.1" -octokit@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/octokit/-/octokit-4.0.2.tgz#775d68d363cdaec69d7b73d3dc82ae909d30f59b" - integrity sha512-wbqF4uc1YbcldtiBFfkSnquHtECEIpYD78YUXI6ri1Im5OO2NLo6ZVpRdbJpdnpZ05zMrVPssNiEo6JQtea+Qg== - dependencies: - "@octokit/app" "^15.0.0" - "@octokit/core" "^6.0.0" - "@octokit/oauth-app" "^7.0.0" - "@octokit/plugin-paginate-graphql" "^5.0.0" - "@octokit/plugin-paginate-rest" "^11.0.0" - 
"@octokit/plugin-rest-endpoint-methods" "^13.0.0" - "@octokit/plugin-retry" "^7.0.0" - "@octokit/plugin-throttling" "^9.0.0" - "@octokit/request-error" "^6.0.0" - "@octokit/types" "^13.0.0" - ohash@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/ohash/-/ohash-1.1.4.tgz#ae8d83014ab81157d2c285abf7792e2995fadd72" @@ -6417,6 +6343,15 @@ slice-ansi@^7.0.0: ansi-styles "^6.2.1" is-fullwidth-code-point "^5.0.0" +smee-client@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/smee-client/-/smee-client-2.0.1.tgz#348a644c3499cc7687fcb42fbbaeeeb3211a365d" + integrity sha512-s2+eG9vNMWQQvu8Jz+SfAiihpYsmaMtcyPnHtBuZEhaAAQOQV63xSSL9StWv2p08xKgvSC8pEZ28rXoy41FhLg== + dependencies: + commander "^12.0.0" + eventsource "^2.0.2" + validator "^13.11.0" + smol-toml@^1.1.4: version "1.2.1" resolved "https://registry.yarnpkg.com/smol-toml/-/smol-toml-1.2.1.tgz#6216334548763d4aac76cafff19f8914937ee13a" @@ -6764,6 +6699,25 @@ ts-jest@29.1.5: semver "^7.5.3" yargs-parser "^21.0.1" +ts-node@^10.9.2: + version "10.9.2" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f" + integrity sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + tsconfig-paths@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz#ef78e19039133446d244beac0fd6a1632e2d107c" @@ -6773,6 +6727,11 @@ tsconfig-paths@^4.2.0: minimist "^1.2.6" strip-bom "^3.0.0" +tslib@^2.1.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.0.tgz#d124c86c3c05a40a91e6fdea4021bd31d377971b" + integrity 
sha512-jWVzBLplnCmoaTr13V9dYbiQ99wvZRd0vNWaDRg+aVYRcjDF3nDksxFDE/+fkXnKhpnUUkmx5pK/v8mCtLVqZA== + tslib@^2.2.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" @@ -6873,10 +6832,10 @@ typescript-eslint@7.13.1: "@typescript-eslint/parser" "7.13.1" "@typescript-eslint/utils" "7.13.1" -typescript@^5.6.3: - version "5.6.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.3.tgz#5f3449e31c9d94febb17de03cc081dd56d81db5b" - integrity sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw== +typescript@5.4.5: + version "5.4.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== ufo@^1.5.4: version "1.5.4" @@ -6990,6 +6949,11 @@ uuid@^8.3.1, uuid@^8.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + v8-to-istanbul@^9.0.1: version "9.2.0" resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz#2ed7644a245cddd83d4e087b9b33b3e62dfd10ad" @@ -7007,6 +6971,11 @@ validate-npm-package-license@^3.0.1: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" +validator@^13.11.0: + version "13.12.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.12.0.tgz#7d78e76ba85504da3fee4fd1922b385914d4b35f" + integrity 
sha512-c1Q0mCiPlgdTVVVIJIrBuxNicYE+t/7oKeI9MWLj3fh/uq2Pxh/3eeWbVZ4OcGW1TUf53At0njHw5SMdA3tmMg== + vlq@^0.2.1: version "0.2.3" resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" @@ -7214,7 +7183,7 @@ yallist@^3.0.2: resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== -yaml@^2.4.5: +yaml@2.4.5, yaml@^2.4.5: version "2.4.5" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.4.5.tgz#60630b206dd6d84df97003d33fc1ddf6296cca5e" integrity sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg== @@ -7242,6 +7211,11 @@ yargs@^17.0.0, yargs@^17.3.1, yargs@^17.7.2: y18n "^5.0.5" yargs-parser "^21.1.1" +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"