diff --git a/README.md b/README.md index cac1f27..75ffd81 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ To set up the `.dev.vars` file, you will need to provide the following variables - Add the following to your `.ubiquibot-config.yml` file with the appropriate URL: ```javascript -plugin: http://127.0.0.1:4000 - runsOn: [ "issue_comment.created", "issue_comment.edited", "issue_comment.deleted" , "issues.opened", "issues.edited", "issues.deleted"] + runsOn: [ "issue_comment.created", "issue_comment.edited", "issue_comment.deleted" , "issues.opened", "issues.edited", "issues.deleted", "issues.labeled"] ``` diff --git a/manifest.json b/manifest.json index 328a300..dbd801c 100644 --- a/manifest.json +++ b/manifest.json @@ -1,5 +1,5 @@ { "name": "@ubiquity-os/comment-vector-embeddings", "description": "Issue comment plugin for Ubiquibot. It enables the storage, updating, and deletion of issue comment embeddings.", - "ubiquity:listeners": ["issue_comment.created", "issue_comment.edited", "issue_comment.deleted", "issues.opened", "issues.edited", "issues.deleted"] + "ubiquity:listeners": ["issue_comment.created", "issue_comment.edited", "issue_comment.deleted", "issues.opened", "issues.edited", "issues.deleted", "issues.labeled"] } diff --git a/src/adapters/supabase/helpers/issues.ts b/src/adapters/supabase/helpers/issues.ts index 5f30821..6bfef09 100644 --- a/src/adapters/supabase/helpers/issues.ts +++ b/src/adapters/supabase/helpers/issues.ts @@ -9,6 +9,17 @@ export interface IssueSimilaritySearchResult { similarity: number; } +export interface IssueType { + id: string; + markdown?: string; + plaintext?: string; + payload?: Record<string, unknown>; + author_id: number; + created_at: string; + modified_at: string; + embedding: number[]; +} + export class Issues extends SuperSupabase { constructor(supabase: SupabaseClient, context: Context) { super(supabase, context); @@ -66,6 +77,19 @@ export class Issues extends SuperSupabase { } } + async getIssue(issueNodeId: string): 
Promise<IssueType[] | null> { + const { data, error } = await this.supabase + .from("issues") // Provide the second type argument + .select("*") + .eq("id", issueNodeId) + .returns<IssueType[]>(); + if (error) { + this.context.logger.error("Error getting issue", error); + return null; + } + return data; + } + async findSimilarIssues(markdown: string, threshold: number, currentId: string): Promise<IssueSimilaritySearchResult[] | null> { const embedding = await this.context.adapters.voyage.embedding.createEmbedding(markdown); const { data, error } = await this.supabase.rpc("find_similar_issues", { @@ -79,4 +103,11 @@ export class Issues extends SuperSupabase { } return data; } + + async updatePayload(issueNodeId: string, payload: Record<string, unknown>) { + const { error } = await this.supabase.from("issues").update({ payload }).eq("id", issueNodeId); + if (error) { + this.context.logger.error("Error updating issue payload", error); + } + } } diff --git a/src/handlers/issue-deduplication.ts b/src/handlers/issue-deduplication.ts index 2378a1e..6174e4f 100644 --- a/src/handlers/issue-deduplication.ts +++ b/src/handlers/issue-deduplication.ts @@ -27,7 +27,6 @@ export async function issueChecker(context: Context): Promise<void> { // Fetch all similar issues based on settings.warningThreshold const similarIssues = await supabase.issue.findSimilarIssues(issueContent, context.config.warningThreshold, issue.node_id); - console.log(similarIssues); if (similarIssues && similarIssues.length > 0) { const matchIssues = similarIssues.filter((issue) => issue.similarity >= context.config.matchThreshold); diff --git a/src/handlers/issue-matching.ts b/src/handlers/issue-matching.ts new file mode 100644 index 0000000..cc1d060 --- /dev/null +++ b/src/handlers/issue-matching.ts @@ -0,0 +1,150 @@ +import { Context } from "../types"; +import { IssuePayload } from "../types/payload"; + +export interface IssueGraphqlResponse { + node: { + title: string; + url: string; + state: string; + stateReason: string; + closed: boolean; + repository: { + owner: { + login: string; + }; + name: 
string; + }; + assignees: { + nodes: Array<{ + login: string; + url: string; + }>; + }; + }; + similarity: number; +} + +const commentBuilder = (matchResultArray: Map<string, Array<string>>): string => { + const commentLines: string[] = [">[!NOTE]", ">The following contributors may be suitable for this task:"]; + matchResultArray.forEach((issues, assignee) => { + commentLines.push(`>### [${assignee}](https://www.github.com/${assignee})`); + issues.forEach((issue) => { + commentLines.push(issue); + }); + }); + return commentLines.join("\n"); +}; + +export async function issueMatching(context: Context) { + const { + logger, + adapters: { supabase }, + octokit, + } = context; + const { payload } = context as { payload: IssuePayload }; + const issue = payload.issue; + const issueContent = issue.body + issue.title; + const commentStart = ">The following contributors may be suitable for this task:"; + + // On Adding the labels to the issue, the bot should + // create a new comment with users who completed task most similar to the issue + // if the comment already exists, it should update the comment with the new users + const matchResultArray: Map<string, Array<string>> = new Map(); + const similarIssues = await supabase.issue.findSimilarIssues(issueContent, context.config.jobMatchingThreshold, issue.node_id); + if (similarIssues && similarIssues.length > 0) { + // Find the most similar issue and the users who completed the task + similarIssues.sort((a, b) => b.similarity - a.similarity); + const fetchPromises = similarIssues.map(async (issue) => { + const issueObject: IssueGraphqlResponse = await context.octokit.graphql( + `query ($issueNodeId: ID!) { + node(id: $issueNodeId) { + ... 
on Issue { + title + url + state + repository{ + name + owner { + login + } + } + stateReason + closed + assignees(first: 10) { + nodes { + login + url + } + } + } + } + }`, + { issueNodeId: issue.issue_id } + ); + issueObject.similarity = issue.similarity; + return issueObject; + }); + + const issueList = await Promise.all(fetchPromises); + issueList.forEach((issue) => { + if (issue.node.closed && issue.node.stateReason === "COMPLETED" && issue.node.assignees.nodes.length > 0) { + const assignees = issue.node.assignees.nodes; + assignees.forEach((assignee) => { + const similarityPercentage = Math.round(issue.similarity * 100); + const issueLink = issue.node.url.replace(/https?:\/\/github.com/, "https://www.github.com"); + if (matchResultArray.has(assignee.login)) { + matchResultArray + .get(assignee.login) + ?.push( + `> \`${similarityPercentage}% Match\` [${issue.node.repository.owner.login}/${issue.node.repository.name}#${issue.node.url.split("/").pop()}](${issueLink})` + ); + } else { + matchResultArray.set(assignee.login, [ + `> \`${similarityPercentage}% Match\` [${issue.node.repository.owner.login}/${issue.node.repository.name}#${issue.node.url.split("/").pop()}](${issueLink})`, + ]); + } + }); + } + }); + // Fetch if any previous comment exists + const listIssues = await octokit.issues.listComments({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + issue_number: issue.number, + }); + //Check if the comment already exists + const existingComment = listIssues.data.find((comment) => comment.body && comment.body.includes(">[!NOTE]" + "\n" + commentStart)); + //Check if matchResultArray is empty + if (matchResultArray && matchResultArray.size === 0) { + if (existingComment) { + // If the comment already exists, delete it + await octokit.issues.deleteComment({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + comment_id: existingComment.id, + }); + } + logger.debug("No similar issues found"); + return; + } + 
const comment = commentBuilder(matchResultArray); + if (existingComment) { + await context.octokit.issues.updateComment({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + comment_id: existingComment.id, + body: comment, + }); + } else { + await context.octokit.issues.createComment({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + issue_number: payload.issue.number, + body: comment, + }); + } + } + + logger.ok(`Successfully created issue comment!`); + logger.debug(`Exiting issueMatching handler`); +} diff --git a/src/plugin.ts b/src/plugin.ts index 0d0876c..197948b 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -14,6 +14,7 @@ import { deleteIssues } from "./handlers/delete-issue"; import { addIssue } from "./handlers/add-issue"; import { updateIssue } from "./handlers/update-issue"; import { issueChecker } from "./handlers/issue-deduplication"; +import { issueMatching } from "./handlers/issue-matching"; /** * The main plugin function. Split for easier testing. 
@@ -33,13 +34,17 @@ export async function runPlugin(context: Context) { switch (eventName) { case "issues.opened": await issueChecker(context); - return await addIssue(context); + await addIssue(context); + return await issueMatching(context); case "issues.edited": await issueChecker(context); - return await updateIssue(context); + await updateIssue(context); + return await issueMatching(context); case "issues.deleted": return await deleteIssues(context); } + } else if (eventName == "issues.labeled") { + return await issueMatching(context); } else { logger.error(`Unsupported event: ${eventName}`); } diff --git a/src/types/context.ts b/src/types/context.ts index 1227abf..b11ac2f 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -16,7 +16,8 @@ export type SupportedEventsU = | "issue_comment.edited" | "issues.opened" | "issues.edited" - | "issues.deleted"; + | "issues.deleted" + | "issues.labeled"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent<K> : never; diff --git a/src/types/database.ts index f741570..df59882 100644 --- a/src/types/database.ts +++ b/src/types/database.ts @@ -1,6 +1,31 @@ export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[]; export type Database = { + graphql_public: { + Tables: { + [_ in never]: never; + }; + Views: { + [_ in never]: never; + }; + Functions: { + graphql: { + Args: { + operationName?: string; + query?: string; + variables?: Json; + extensions?: Json; + }; + Returns: Json; + }; + }; + Enums: { + [_ in never]: never; + }; + CompositeTypes: { + [_ in never]: never; + }; + }; public: { Tables: { issue_comments: { @@ -9,24 +34,77 @@ export type Database = { created_at: string; embedding: string; id: string; + issue_id: string | null; + markdown: string | null; modified_at: string; + payloadobject: Json | null; plaintext: string | null; + type: string; }; Insert: { author_id: string; created_at?: string; embedding: string; id: string; + issue_id?: string | null; + markdown?: string | null; modified_at?: string; + payloadobject?: Json | null; plaintext?: string | null; + type: string; }; Update: { author_id?: string; created_at?: string; embedding?: string; id?: string; + issue_id?: string | null; + markdown?: string | null; + modified_at?: string; + payloadobject?: Json | null; + plaintext?: string | null; + type?: string; + }; + Relationships: [ + { + foreignKeyName: "issue_comments_issue_id_fkey"; + columns: ["issue_id"]; + isOneToOne: false; + referencedRelation: "issues"; + referencedColumns: ["id"]; + }, + ]; + }; + issues: { + Row: { + created_at: string; + embedding: string; + id: string; + markdown: string | null; + modified_at: string; + payload: Json | null; + plaintext: string | null; + type: string; + }; + Insert: { + created_at?: string; + embedding: string; + id: string; + markdown?: string | null; + modified_at?: string; + payload?: Json | null; + plaintext?: string | null; + 
type?: string; + }; + Update: { + created_at?: string; + embedding?: string; + id?: string; + markdown?: string | null; modified_at?: string; + payload?: Json | null; plaintext?: string | null; + type?: string; }; Relationships: []; }; @@ -48,6 +126,17 @@ export type Database = { }; Returns: unknown; }; + find_similar_issues: { + Args: { + query_embedding: string; + threshold: number; + }; + Returns: { + id: number; + issue: string; + similarity: number; + }[]; + }; halfvec_avg: { Args: { "": number[]; @@ -215,6 +304,321 @@ export type Database = { [_ in never]: never; }; }; + storage: { + Tables: { + buckets: { + Row: { + allowed_mime_types: string[] | null; + avif_autodetection: boolean | null; + created_at: string | null; + file_size_limit: number | null; + id: string; + name: string; + owner: string | null; + owner_id: string | null; + public: boolean | null; + updated_at: string | null; + }; + Insert: { + allowed_mime_types?: string[] | null; + avif_autodetection?: boolean | null; + created_at?: string | null; + file_size_limit?: number | null; + id: string; + name: string; + owner?: string | null; + owner_id?: string | null; + public?: boolean | null; + updated_at?: string | null; + }; + Update: { + allowed_mime_types?: string[] | null; + avif_autodetection?: boolean | null; + created_at?: string | null; + file_size_limit?: number | null; + id?: string; + name?: string; + owner?: string | null; + owner_id?: string | null; + public?: boolean | null; + updated_at?: string | null; + }; + Relationships: []; + }; + migrations: { + Row: { + executed_at: string | null; + hash: string; + id: number; + name: string; + }; + Insert: { + executed_at?: string | null; + hash: string; + id: number; + name: string; + }; + Update: { + executed_at?: string | null; + hash?: string; + id?: number; + name?: string; + }; + Relationships: []; + }; + objects: { + Row: { + bucket_id: string | null; + created_at: string | null; + id: string; + last_accessed_at: string | null; + 
metadata: Json | null; + name: string | null; + owner: string | null; + owner_id: string | null; + path_tokens: string[] | null; + updated_at: string | null; + user_metadata: Json | null; + version: string | null; + }; + Insert: { + bucket_id?: string | null; + created_at?: string | null; + id?: string; + last_accessed_at?: string | null; + metadata?: Json | null; + name?: string | null; + owner?: string | null; + owner_id?: string | null; + path_tokens?: string[] | null; + updated_at?: string | null; + user_metadata?: Json | null; + version?: string | null; + }; + Update: { + bucket_id?: string | null; + created_at?: string | null; + id?: string; + last_accessed_at?: string | null; + metadata?: Json | null; + name?: string | null; + owner?: string | null; + owner_id?: string | null; + path_tokens?: string[] | null; + updated_at?: string | null; + user_metadata?: Json | null; + version?: string | null; + }; + Relationships: [ + { + foreignKeyName: "objects_bucketId_fkey"; + columns: ["bucket_id"]; + isOneToOne: false; + referencedRelation: "buckets"; + referencedColumns: ["id"]; + }, + ]; + }; + s3_multipart_uploads: { + Row: { + bucket_id: string; + created_at: string; + id: string; + in_progress_size: number; + key: string; + owner_id: string | null; + upload_signature: string; + user_metadata: Json | null; + version: string; + }; + Insert: { + bucket_id: string; + created_at?: string; + id: string; + in_progress_size?: number; + key: string; + owner_id?: string | null; + upload_signature: string; + user_metadata?: Json | null; + version: string; + }; + Update: { + bucket_id?: string; + created_at?: string; + id?: string; + in_progress_size?: number; + key?: string; + owner_id?: string | null; + upload_signature?: string; + user_metadata?: Json | null; + version?: string; + }; + Relationships: [ + { + foreignKeyName: "s3_multipart_uploads_bucket_id_fkey"; + columns: ["bucket_id"]; + isOneToOne: false; + referencedRelation: "buckets"; + referencedColumns: ["id"]; 
+ }, + ]; + }; + s3_multipart_uploads_parts: { + Row: { + bucket_id: string; + created_at: string; + etag: string; + id: string; + key: string; + owner_id: string | null; + part_number: number; + size: number; + upload_id: string; + version: string; + }; + Insert: { + bucket_id: string; + created_at?: string; + etag: string; + id?: string; + key: string; + owner_id?: string | null; + part_number: number; + size?: number; + upload_id: string; + version: string; + }; + Update: { + bucket_id?: string; + created_at?: string; + etag?: string; + id?: string; + key?: string; + owner_id?: string | null; + part_number?: number; + size?: number; + upload_id?: string; + version?: string; + }; + Relationships: [ + { + foreignKeyName: "s3_multipart_uploads_parts_bucket_id_fkey"; + columns: ["bucket_id"]; + isOneToOne: false; + referencedRelation: "buckets"; + referencedColumns: ["id"]; + }, + { + foreignKeyName: "s3_multipart_uploads_parts_upload_id_fkey"; + columns: ["upload_id"]; + isOneToOne: false; + referencedRelation: "s3_multipart_uploads"; + referencedColumns: ["id"]; + }, + ]; + }; + }; + Views: { + [_ in never]: never; + }; + Functions: { + can_insert_object: { + Args: { + bucketid: string; + name: string; + owner: string; + metadata: Json; + }; + Returns: undefined; + }; + extension: { + Args: { + name: string; + }; + Returns: string; + }; + filename: { + Args: { + name: string; + }; + Returns: string; + }; + foldername: { + Args: { + name: string; + }; + Returns: string[]; + }; + get_size_by_bucket: { + Args: Record<PropertyKey, never>; + Returns: { + size: number; + bucket_id: string; + }[]; + }; + list_multipart_uploads_with_delimiter: { + Args: { + bucket_id: string; + prefix_param: string; + delimiter_param: string; + max_keys?: number; + next_key_token?: string; + next_upload_token?: string; + }; + Returns: { + key: string; + id: string; + created_at: string; + }[]; + }; + list_objects_with_delimiter: { + Args: { + bucket_id: string; + prefix_param: string; + delimiter_param: 
string; + max_keys?: number; + start_after?: string; + next_token?: string; + }; + Returns: { + name: string; + id: string; + metadata: Json; + updated_at: string; + }[]; + }; + operation: { + Args: Record<PropertyKey, never>; + Returns: string; + }; + search: { + Args: { + prefix: string; + bucketname: string; + limits?: number; + levels?: number; + offsets?: number; + search?: string; + sortcolumn?: string; + sortorder?: string; + }; + Returns: { + name: string; + id: string; + updated_at: string; + created_at: string; + last_accessed_at: string; + metadata: Json; + }[]; + }; + }; + Enums: { + [_ in never]: never; + }; + CompositeTypes: { + [_ in never]: never; + }; + }; }; type PublicSchema = Database[Extract<keyof Database, "public">]; diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index a942db2..b5e9ce3 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -22,8 +22,9 @@ export const pluginSettingsSchema = T.Object( { matchThreshold: T.Number(), warningThreshold: T.Number(), + jobMatchingThreshold: T.Number(), }, - { default: { matchThreshold: 0.95, warningThreshold: 0.75 } } + { default: { matchThreshold: 0.95, warningThreshold: 0.75, jobMatchingThreshold: 0.75 } } ); export const pluginSettingsValidator = new StandardValidator(pluginSettingsSchema); diff --git a/tests/main.test.ts b/tests/main.test.ts index a8c14b8..bc4ed19 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -166,6 +166,7 @@ function createContextInner( config: { warningThreshold: 0.75, matchThreshold: 0.95, + jobMatchingThreshold: 0.95, }, adapters: {} as Context["adapters"], logger: new Logs("debug"),