From 3de383f5fa80f50a1ad069d604e850b4611eea59 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 14:49:43 +0100 Subject: [PATCH 01/72] chore(deps): openAi --- package.json | 4 +- yarn.lock | 133 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 136 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index cbe5fb0..917a756 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "@sinclair/typebox": "0.32.33", "@supabase/supabase-js": "2.43.5", "dotenv": "16.4.5", + "openai": "^4.52.7", "typebox-validators": "0.3.5" }, "devDependencies": { @@ -79,5 +80,6 @@ "extends": [ "@commitlint/config-conventional" ] - } + }, + "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610" } diff --git a/yarn.lock b/yarn.lock index d4049db..fedf3d4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1946,6 +1946,14 @@ dependencies: "@types/node" "*" +"@types/node-fetch@^2.6.4": + version "2.6.11" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" + integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== + dependencies: + "@types/node" "*" + form-data "^4.0.0" + "@types/node-forge@^1.3.0": version "1.3.11" resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da" @@ -1967,6 +1975,13 @@ dependencies: undici-types "~5.26.4" +"@types/node@^18.11.18": + version "18.19.39" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.39.tgz#c316340a5b4adca3aee9dcbf05de385978590593" + integrity sha512-nPwTRDKUctxw3di5b4TfT3I0sWDiWoPQCZjXhvdkINntwr8lcoVCKsTgnXeRubKIlfnV+eN/HYk6Jb40tbcEAQ== + dependencies: + undici-types "~5.26.4" + "@types/phoenix@^1.5.4": version "1.6.4" resolved "https://registry.yarnpkg.com/@types/phoenix/-/phoenix-1.6.4.tgz#cceac93a827555473ad38057d1df7d06eef1ed71" @@ -2105,6 +2120,13 @@ JSONStream@^1.3.5: jsonparse "^1.2.0" through ">=2.2.7 <3" +abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== + dependencies: + event-target-shim "^5.0.0" + acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" @@ -2125,6 +2147,13 @@ acorn@^8.8.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== +agentkeepalive@^4.2.1: + version "4.5.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" + integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== + dependencies: + humanize-ms "^1.2.1" + aggregate-error@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" @@ -2288,6 +2317,11 @@ async-lock@^1.4.1: resolved "https://registry.yarnpkg.com/async-lock/-/async-lock-1.4.1.tgz#56b8718915a9b68b10fce2f2a9a3dddf765ef53f" integrity sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ== 
+asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + available-typed-arrays@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz#a5cc375d6a03c2efc87a553f3e0b1522def14846" @@ -2637,6 +2671,13 @@ colorette@^2.0.20: resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + commander@^12.1.0, commander@~12.1.0: version "12.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-12.1.0.tgz#01423b36f501259fdaac4d0e4d60c96c991585d3" @@ -2998,6 +3039,11 @@ defu@^6.1.4: resolved "https://registry.yarnpkg.com/defu/-/defu-6.1.4.tgz#4e0c9cf9ff68fe5f3d7f2765cc1a012dfdcb0479" integrity sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg== +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + deprecation@^2.0.0: version "2.3.1" resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" @@ -3381,6 +3427,11 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== +event-target-shim@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== + eventemitter3@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" @@ -3556,6 +3607,28 @@ for-each@^0.3.3: dependencies: is-callable "^1.1.3" +form-data-encoder@1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" + integrity sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A== + +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +formdata-node@^4.3.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-4.4.1.tgz#23f6a5cb9cb55315912cbec4ff7b0f59bbd191e2" + integrity sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ== + 
dependencies: + node-domexception "1.0.0" + web-streams-polyfill "4.0.0-beta.3" + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -3841,6 +3914,13 @@ human-signals@^5.0.0: resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28" integrity sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ== +humanize-ms@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" + integrity sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ== + dependencies: + ms "^2.0.0" + husky@9.0.11: version "9.0.11" resolved "https://registry.yarnpkg.com/husky/-/husky-9.0.11.tgz#fc91df4c756050de41b3e478b2158b87c1e79af9" @@ -4928,6 +5008,18 @@ micromatch@^4.0.4, micromatch@^4.0.5, micromatch@^4.0.7, micromatch@~4.0.7: braces "^3.0.3" picomatch "^2.3.1" +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + mime@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" @@ -5002,6 +5094,11 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== +ms@^2.0.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + msw@^2.0.8: version "2.3.1" resolved "https://registry.yarnpkg.com/msw/-/msw-2.3.1.tgz#bfc73e256ffc2c74ec4381b604abb258df35f32b" @@ -5050,11 +5147,23 @@ nice-try@^1.0.4: resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== +node-domexception@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" + integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== + node-fetch-native@^1.6.4: version "1.6.4" resolved "https://registry.yarnpkg.com/node-fetch-native/-/node-fetch-native-1.6.4.tgz#679fc8fd8111266d47d7e72c379f1bed9acff06e" integrity sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ== +node-fetch@^2.6.7: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + node-forge@^1: version "1.3.1" resolved 
"https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -5165,6 +5274,20 @@ onetime@^6.0.0: dependencies: mimic-fn "^4.0.0" +openai@^4.52.7: + version "4.52.7" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.7.tgz#e32b000142287a9e8eda8512ba28df33d11ec1f1" + integrity sha512-dgxA6UZHary6NXUHEDj5TWt8ogv0+ibH+b4pT5RrWMjiRZVylNwLcw/2ubDrX5n0oUmHX/ZgudMJeemxzOvz7A== + dependencies: + "@types/node" "^18.11.18" + "@types/node-fetch" "^2.6.4" + abort-controller "^3.0.0" + agentkeepalive "^4.2.1" + form-data-encoder "1.7.2" + formdata-node "^4.3.2" + node-fetch "^2.6.7" + web-streams-polyfill "^3.2.1" + optionator@^0.9.3: version "0.9.4" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" @@ -6351,6 +6474,16 @@ wcwidth@^1.0.1: dependencies: defaults "^1.0.3" +web-streams-polyfill@4.0.0-beta.3: + version "4.0.0-beta.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" + integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== + +web-streams-polyfill@^3.2.1: + version "3.3.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b" + integrity sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw== + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" From bf8d4920262e93473a960ddf4e0267404778c6ca Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 14:51:56 +0100 Subject: [PATCH 02/72] chore: settings config --- src/types/context.ts | 2 +- src/types/env.ts | 5 +---- src/types/plugin-inputs.ts | 7 ++++++- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/types/context.ts b/src/types/context.ts index 45a0266..903894f 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -4,7 +4,7 @@ import { createAdapters } from "../adapters"; import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; -export type SupportedEventsU = "issue_comment.created"; // Add more events here +export type SupportedEventsU = "issue_comment.created"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; diff --git a/src/types/env.ts b/src/types/env.ts index 512e64e..19bc3c3 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -3,10 +3,7 @@ import { StaticDecode } from "@sinclair/typebox"; import "dotenv/config"; import { StandardValidator } from "typebox-validators"; -export const envSchema = T.Object({ - SUPABASE_URL: T.String(), - SUPABASE_KEY: T.String(), -}); +export const envSchema = T.Object({}); export const envValidator = new StandardValidator(envSchema); diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 00d0a52..166b623 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -18,7 +18,12 @@ export interface PluginInputs; From d8de447fec3d30787f6e4a5edc5da4a26440442b Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 14:53:37 +0100 Subject: [PATCH 03/72] chore: remove supabase --- package.json | 3 +- src/adapters/index.ts | 17 ----- src/adapters/supabase/helpers/access.ts | 49 ------------- src/adapters/supabase/helpers/label.ts | 52 -------------- src/adapters/supabase/helpers/supabase.ts | 12 ---- src/adapters/supabase/helpers/user.ts | 84 ----------------------- src/plugin.ts | 6 +- src/types/context.ts | 3 +- 8 files changed, 3 insertions(+), 223 deletions(-) delete mode 100644 src/adapters/index.ts delete mode 100644 src/adapters/supabase/helpers/access.ts delete mode 100644 src/adapters/supabase/helpers/label.ts delete mode 100644 src/adapters/supabase/helpers/supabase.ts delete mode 100644 src/adapters/supabase/helpers/user.ts diff --git a/package.json b/package.json index 917a756..7eccaea 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,6 @@ "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", - "@supabase/supabase-js": "2.43.5", "dotenv": "16.4.5", "openai": "^4.52.7", "typebox-validators": "0.3.5" @@ -82,4 +81,4 @@ ] }, "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610" -} +} \ No newline at end of file diff --git a/src/adapters/index.ts b/src/adapters/index.ts deleted file mode 100644 index 23fb4b3..0000000 --- a/src/adapters/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Context } from "../types/context"; -import { Access } from "./supabase/helpers/access"; -import { User } from "./supabase/helpers/user"; -import { Label } from "./supabase/helpers/label"; -import { Super } from "./supabase/helpers/supabase"; - -export function createAdapters(supabaseClient: SupabaseClient, context: Context) { - return { - supabase: { - access: new Access(supabaseClient, context), - user: new User(supabaseClient, context), - label: new Label(supabaseClient, context), - super: new Super(supabaseClient, context), - }, - }; -} diff --git a/src/adapters/supabase/helpers/access.ts b/src/adapters/supabase/helpers/access.ts deleted file mode 100644 index dc32281..0000000 --- a/src/adapters/supabase/helpers/access.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Super } from "./supabase"; -import { Context } from "../../../types/context"; - -export class Access extends Super { - constructor(supabase: SupabaseClient, context: Context) { - super(supabase, context); - } - - public async getAccess(userId: number, repositoryId: number) { - const { data, error } = await this.supabase - .from("access") - .select("*") - .filter("user_id", "eq", userId) - .filter("repository_id", "eq", 
repositoryId) - .limit(1) - .maybeSingle(); - - if (error) { - this.context.logger.fatal(error.message, error); - throw new Error(error.message); - } - return data; - } - - public async setAccess(userId: number, repositoryId: number, labels: string[]) { - if (!labels.length) { - return this.clearAccess(userId, repositoryId); - } - const { data, error } = await this.supabase - .from("access") - .upsert({ - user_id: userId, - repository_id: repositoryId, - labels: labels, - }) - .select() - .maybeSingle(); - - if (error) throw new Error(error.message); - return data; - } - - public async clearAccess(userId: number, repositoryId: number): Promise { - const { data, error } = await this.supabase.from("access").delete().filter("user_id", "eq", userId).filter("repository_id", "eq", repositoryId); - if (error) throw new Error(error.message); - return data; - } -} diff --git a/src/adapters/supabase/helpers/label.ts b/src/adapters/supabase/helpers/label.ts deleted file mode 100644 index 77e0288..0000000 --- a/src/adapters/supabase/helpers/label.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Super } from "./supabase"; -import { Context } from "../../../types/context"; - -export class Label extends Super { - constructor(supabase: SupabaseClient, context: Context) { - super(supabase, context); - } - - async saveLabelChange({ - previousLabel, - currentLabel, - authorized, - userId, - repositoryId, - }: { - previousLabel: string; - currentLabel: string; - authorized: boolean; - userId: number; - repositoryId: number; - }) { - const { data, error } = await this.supabase - .from("labels") - .insert({ - label_from: previousLabel, - label_to: currentLabel, - authorized: authorized, - user_id: userId, - repository_id: repositoryId, - }) - .select() - .single(); - - if (error) throw new Error(error.message); - return data; - } - - async getLabelChanges(repositoryNodeId: string) { - const { data, error } = await this.supabase.from("labels").select("*").eq("repository_id", repositoryNodeId).eq("authorized", false); - - if (error) throw new Error(error.message); - return data; - } - - async approveLabelChange(id: number): Promise { - const { data, error } = await this.supabase.from("labels").update({ authorized: true }).eq("id", id); - - if (error) throw new Error(error.message); - return data; - } -} diff --git a/src/adapters/supabase/helpers/supabase.ts b/src/adapters/supabase/helpers/supabase.ts deleted file mode 100644 index 7a13b85..0000000 --- a/src/adapters/supabase/helpers/supabase.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Context } from "../../../types/context"; - -export class Super { - protected supabase: SupabaseClient; - protected context: Context; - - constructor(supabase: SupabaseClient, context: Context) { - this.supabase = supabase; - this.context = context; - } -} diff --git a/src/adapters/supabase/helpers/user.ts b/src/adapters/supabase/helpers/user.ts deleted file mode 100644 index fa8b687..0000000 --- a/src/adapters/supabase/helpers/user.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { SupabaseClient } from "@supabase/supabase-js"; -import { Super } from "./supabase"; -import { Context } from "../../../types/context"; - -type Wallet = { - address: string; -}; - -export class User extends Super { - user_id: string | undefined; - comment_id: string | undefined; - issue_id: string | undefined; - repository_id: string | undefined; - node_id: string | undefined; - node_type: string | 
undefined; - - constructor(supabase: SupabaseClient, context: Context) { - super(supabase, context); - } - - async getUserById(userId: number, issueNumber: number) { - const { data, error } = await this.supabase.from("users").select("*").eq("id", userId).single(); - if (error) { - console.error(FAILED_TO_GET_USER, { userId, error, issueNumber }); - return null; - } - - console.info(SUCCESSFULLY_FETCHED_USER, { userId, issueNumber, ...data }); - return data; - } - - async getWalletByUserId(userId: number, issueNumber: number) { - const { data, error }: { data: { wallets: Wallet } | null; error: unknown } = await this.supabase - .from("users") - .select("wallets(*)") - .eq("id", userId) - .single(); - if ((error && !data) || !data?.wallets?.address) { - console.error("No wallet address found", { userId, issueNumber }, true); - throw new Error("No wallet address found"); - } - - console.info("Successfully fetched wallet", { userId, address: data.wallets?.address }); - return data.wallets?.address; - } - - public async getMultiplier(userId: number, repositoryId: number) { - const locationData = await this.getLocationsFromRepo(repositoryId); - if (locationData && locationData.length > 0) { - const accessData = await this._getAccessData(locationData, userId); - if (accessData) { - return { - value: accessData.multiplier || null, - reason: accessData.multiplier_reason || null, - }; - } - } - return null; - } - - private async _getAccessData(locationData: { id: number }[], userId: number) { - const locationIdsInCurrentRepository = locationData.map((location) => location.id); - - const { data: accessData, error: accessError } = await this.supabase - .from("access") - .select("multiplier, multiplier_reason") - .in("location_id", locationIdsInCurrentRepository) - .eq("user_id", userId) - .order("id", { ascending: false }) // get the latest one - .maybeSingle(); - if (accessError) throw console.error("Error getting access data", accessError); - return accessData; - } - - public async getLocationsFromRepo(repositoryId: number) { - const { data: locationData, error } = await this.supabase.from("locations").select("id").eq("repository_id", repositoryId); - - if (error) throw console.error("Error getting location data", new Error(error.message)); - return locationData; - } -} - -const FAILED_TO_GET_USER = "Failed to get user"; -const SUCCESSFULLY_FETCHED_USER = "Successfully fetched user"; diff --git a/src/plugin.ts b/src/plugin.ts index c790042..6739dbe 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,6 +1,4 @@ import { Octokit } from "@octokit/rest"; -import { createClient } from "@supabase/supabase-js"; -import { createAdapters } from "./adapters"; import { Env, PluginInputs } from "./types"; import { Context } from "./types"; @@ -9,7 +7,6 @@ import { Context } from "./types"; */ export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); - const supabase = createClient(env.SUPABASE_URL, env.SUPABASE_KEY); const context: Context = { eventName: inputs.eventName, @@ -34,10 +31,9 @@ export async function plugin(inputs: PluginInputs, env: Env) { console.error(message, ...optionalParams); }, }, - adapters: {} as ReturnType, + adapters: {} as never, }; - context.adapters = createAdapters(supabase, context); if (context.eventName === "issue_comment.created") { // do something diff --git a/src/types/context.ts b/src/types/context.ts index 903894f..5869fd8 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -1,6 +1,5 @@ import { Octokit 
} from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; -import { createAdapters } from "../adapters"; import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; @@ -14,7 +13,7 @@ export interface Context; - adapters: ReturnType; + adapters: never config: PluginSettings; env: Env; logger: { From 12cbcc4ed767d6b69ce0f823d9e84dfa9c5c5e7c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 14:59:35 +0100 Subject: [PATCH 04/72] feat: add commnt with diff styles --- src/handlers/add-comment.ts | 28 ++++++++++++++++++++++++++++ src/plugin.ts | 19 +++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 src/handlers/add-comment.ts diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts new file mode 100644 index 0000000..fe8d732 --- /dev/null +++ b/src/handlers/add-comment.ts @@ -0,0 +1,28 @@ +import { Context } from "../types/context"; + +const diffStyles = { + warning: "```diff\n! ", + error: "```diff\n- ", + success: "```diff\n+ ", + info: "```diff\n# ", +}; + +export async function addCommentToIssue(context: Context, message: string, diff = false, diffStyle?: keyof typeof diffStyles) { + const { payload } = context; + const issueNumber = payload.issue.number; + + if (diff && diffStyle) { + message = `${diffStyles[diffStyle]}${message}\n\`\`\``; + } + + try { + await context.octokit.issues.createComment({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + issue_number: issueNumber, + body: message, + }); + } catch (e: unknown) { + context.logger.fatal("Adding a comment failed!", e); + } +} diff --git a/src/plugin.ts b/src/plugin.ts index 6739dbe..f9bb6ea 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,6 +1,7 @@ import { Octokit } from "@octokit/rest"; import { Env, PluginInputs } from "./types"; import { Context } from "./types"; +import { addCommentToIssue } from "./handlers/add-comment"; /** * How a worker executes the plugin. @@ -37,6 +38,24 @@ export async function plugin(inputs: PluginInputs, env: Env) { if (context.eventName === "issue_comment.created") { // do something + const comment = context.payload.comment.body; + if (!comment.startsWith("/gpt")) { + context.logger.info("Comment does not start with /gpt. Skipping."); + return; + } + + const { isEnabled } = context.config; + if (!isEnabled) { + context.logger.info("Plugin is disabled. Skipping."); + await addCommentToIssue(context, "The /gpt command is disabled. 
Enable it in the plugin settings.", true, "warning"); + return; + } + + + + + + } else { context.logger.error(`Unsupported event: ${context.eventName}`); } From b9c1ab57ed7c5e47d3fc1eb5fb6baa77a9321b12 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 15:03:39 +0100 Subject: [PATCH 05/72] feat: simple openai chat fn --- src/handlers/ask-gpt.ts | 45 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 src/handlers/ask-gpt.ts diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts new file mode 100644 index 0000000..8e942c2 --- /dev/null +++ b/src/handlers/ask-gpt.ts @@ -0,0 +1,45 @@ +import OpenAI from "openai"; +import { ChatCompletionMessageParam } from "openai/resources"; +import { addCommentToIssue } from "./add-comment"; +import { Context } from "../types"; + +export async function askGPT(context: Context, chatHistory: ChatCompletionMessageParam[]) { + const { + logger, + config: { + openAi_apiKey: openAi, + }, + } = context; + + if (!openAi) { + logger.error(`No OpenAI API Key provided`); + await addCommentToIssue(context, "No OpenAI API Key detected!", true, "error"); // TOO confirm correct style here + return; + } + + const openAI = new OpenAI({ + apiKey: openAi, + }); + + const res: OpenAI.Chat.Completions.ChatCompletion = await openAI.chat.completions.create({ + messages: chatHistory, + model: "gpt-4o", + temperature: 0, + }); + + if (!res.choices) { + logger.error(`No response from OpenAI`); + await addCommentToIssue(context, "No response from OpenAI", true, "error"); + return; + } + + const answer = res.choices[0].message.content; + + const tokenUsage = { + output: res.usage?.completion_tokens, + input: res.usage?.prompt_tokens, + total: res.usage?.total_tokens, + }; + + return { answer, tokenUsage }; +} From 7643b3fb9da52feb0c57b0b3b7722f3fcbb003fe Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 15:49:46 +0100 Subject: [PATCH 06/72] feat: issue related functions --- src/handlers/context-handler.ts | 122 ++++++++++++++++++++++++++++++++ src/plugin.ts | 5 +- src/types/github.ts | 4 ++ 3 files changed, 130 insertions(+), 1 deletion(-) create mode 100644 src/handlers/context-handler.ts create mode 100644 src/types/github.ts diff --git a/src/handlers/context-handler.ts b/src/handlers/context-handler.ts new file mode 100644 index 0000000..4578781 --- /dev/null +++ b/src/handlers/context-handler.ts @@ -0,0 +1,122 @@ +import { Context } from "../types"; +import { Issue, IssueComments } from "../types/github"; + +type FetchParams = { + context: Context; + issueNum?: number; + owner?: string; + repo?: string; +}; + +/** + * Becuase in the eyes of the GitHub api Pull Requests are also + * issues, we can use the same functions for both. 
+ */ + +export async function fetchIssue(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.issues.get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); +} + +export async function fetchIssueComments(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.paginate(octokit.issues.listComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }) +} + +export async function fetchLinkedIssues(params: FetchParams, comments?: IssueComments) { + let issueComments: IssueComments | undefined = comments; + const linkedIssues: { + issueNumber: number; + owner: string; + repo: string; + }[] = []; + + if (!issueComments && !params) { + throw new Error("Either issueComments or params must be provided"); + } + + if (!issueComments) { + issueComments = await fetchIssueComments(params); + } + + const { context: { logger, payload: { repository: { owner: { login } } } } } = params + + if (!issueComments) { + logger.info("No comments found on issue"); + return linkedIssues + } + + for (const comment of issueComments) { + const linkedIssue = idIssueFromComment(login, comment.body); + if (linkedIssue && linkedIssue.issueNumber && linkedIssue.repo) { + if (await isRepoFromSameOrg(params.context, linkedIssue.repo, login)) { + linkedIssues.push({ + issueNumber: linkedIssue.issueNumber, + owner: login, + repo: linkedIssue.repo + }); + } else { + logger.info(`Ignoring linked issue ${linkedIssue.issueNumber} from ${linkedIssue.repo} as it is not from the same org`); + } + } + } + + return linkedIssues; +} + +function idIssueFromComment(owner: string, comment?: string) { + if (!comment) return + // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag + const urlMatch = comment.match(/https:\/\/github.com\/([^/]+)\/([^/]+)\/(pull|issue)\/(\d+)/); + + /** + * I think we should restrict including any linked context which is not of the same org. + * + * In most cases this will be the expected behaviour, I remember a scenario where + * I linked to an issue in a 3rd party org, for extra reviewer context but I also include the + * TL;DR which is always the case. We wouldn't want that full 3rd party PR review or issue to be + * included in the context. + */ + + const linkedIssue: { + issueNumber: number; + repo: string; + } = { + issueNumber: 0, + repo: "" + }; + + /** + * If following the rule that only issues from the same org should be included + * then we need to be sure that this format of linked issue is from the same org. 
+ */ + + if (urlMatch && urlMatch[1] === owner) { + linkedIssue.issueNumber = parseInt(urlMatch[4]); + linkedIssue.repo = urlMatch[2]; + } + + return linkedIssue; +} + +async function isRepoFromSameOrg(context: Context, repo: string, owner: string) { + const { octokit } = context; + const { data } = await octokit.repos.get({ + owner, + repo + }); + + return data.owner.login === owner; +} \ No newline at end of file diff --git a/src/plugin.ts b/src/plugin.ts index f9bb6ea..7e746d2 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -35,7 +35,6 @@ export async function plugin(inputs: PluginInputs, env: Env) { adapters: {} as never, }; - if (context.eventName === "issue_comment.created") { // do something const comment = context.payload.comment.body; @@ -45,6 +44,7 @@ export async function plugin(inputs: PluginInputs, env: Env) { } const { isEnabled } = context.config; + if (!isEnabled) { context.logger.info("Plugin is disabled. Skipping."); await addCommentToIssue(context, "The /gpt command is disabled. Enable it in the plugin settings.", true, "warning"); @@ -56,6 +56,9 @@ export async function plugin(inputs: PluginInputs, env: Env) { + + + } else { context.logger.error(`Unsupported event: ${context.eventName}`); } diff --git a/src/types/github.ts b/src/types/github.ts new file mode 100644 index 0000000..908aad0 --- /dev/null +++ b/src/types/github.ts @@ -0,0 +1,4 @@ +import { RestEndpointMethodTypes } from "@octokit/rest"; + +export type Issue = RestEndpointMethodTypes["issues"]["get"]["response"]["data"]; +export type IssueComments = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"]; From bfac23dece9c966247c97dc2d05fbcae4ef2421a Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Fri, 12 Jul 2024 15:55:35 +0100 Subject: [PATCH 07/72] fix: improved context issue filtering --- src/handlers/context-handler.ts | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/src/handlers/context-handler.ts b/src/handlers/context-handler.ts index 4578781..ccb0e79 100644 --- a/src/handlers/context-handler.ts +++ b/src/handlers/context-handler.ts @@ -39,7 +39,6 @@ export async function fetchLinkedIssues(params: FetchParams, comments?: IssueCom let issueComments: IssueComments | undefined = comments; const linkedIssues: { issueNumber: number; - owner: string; repo: string; }[] = []; @@ -60,20 +59,36 @@ export async function fetchLinkedIssues(params: FetchParams, comments?: IssueCom for (const comment of issueComments) { const linkedIssue = idIssueFromComment(login, comment.body); - if (linkedIssue && linkedIssue.issueNumber && linkedIssue.repo) { - if (await isRepoFromSameOrg(params.context, linkedIssue.repo, login)) { - linkedIssues.push({ - issueNumber: linkedIssue.issueNumber, - owner: login, - repo: linkedIssue.repo + if (linkedIssue) { + linkedIssues.push(linkedIssue); + } + } + + return await filterLinkedIssues(params, linkedIssues); +} + +async function filterLinkedIssues(params: FetchParams, linkedIssues: { issueNumber: number; repo: string; }[]) { + const { context: { logger, payload: { repository: { owner: { login } } } } } = params + + const contextIssues: { + issueNumber: number; + repo: string; + }[] = []; + + for (const issue of linkedIssues) { + if (issue && issue.issueNumber && issue.repo) { + if (await isRepoFromSameOrg(params.context, issue.repo, login)) { + contextIssues.push({ + issueNumber: issue.issueNumber, + repo: issue.repo }); } else { - logger.info(`Ignoring linked issue 
${linkedIssue.issueNumber} from ${linkedIssue.repo} as it is not from the same org`); + logger.info(`Ignoring linked issue ${issue.issueNumber} from ${issue.repo} as it is not from the same org`); } } } - return linkedIssues; + return contextIssues; } function idIssueFromComment(owner: string, comment?: string) { From 554f3f8471ac44f7134cbc2fdc1bf87c12fde5cb Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 02:39:26 +0100 Subject: [PATCH 08/72] chore: types and plugin entry --- .github/workflows/compute.yml | 12 +-- src/handlers/context-handler.ts | 137 -------------------------------- src/plugin.ts | 32 +++++--- src/types/context.ts | 2 +- src/types/gpt.ts | 12 +++ 5 files changed, 39 insertions(+), 156 deletions(-) delete mode 100644 src/handlers/context-handler.ts create mode 100644 src/types/gpt.ts diff --git a/.github/workflows/compute.yml b/.github/workflows/compute.yml index 3d204b1..9a34dce 100644 --- a/.github/workflows/compute.yml +++ b/.github/workflows/compute.yml @@ -1,4 +1,4 @@ -name: "the name of the plugin" +name: "ubiquibot-gpt-command" on: workflow_dispatch: @@ -18,12 +18,9 @@ on: jobs: compute: - name: "plugin name" + name: "gpt-command" runs-on: ubuntu-latest permissions: write-all - env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL }} - SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} steps: - uses: actions/checkout@v4 @@ -38,7 +35,4 @@ jobs: - name: execute directive run: npx tsx ./src/main.ts - id: plugin-name - env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL }} - SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} + id: gpt-command diff --git a/src/handlers/context-handler.ts b/src/handlers/context-handler.ts deleted file mode 100644 index ccb0e79..0000000 --- a/src/handlers/context-handler.ts +++ /dev/null @@ -1,137 +0,0 @@ -import { Context } from "../types"; -import { Issue, IssueComments } from "../types/github"; - -type FetchParams = { - context: Context; - issueNum?: number; - owner?: string; - repo?: string; -}; - -/** - * Becuase in the eyes of the GitHub api Pull Requests are also - * issues, we can use the same functions for both. 
- */ - -export async function fetchIssue(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; - - return await octokit.issues.get({ - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }); -} - -export async function fetchIssueComments(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; - - return await octokit.paginate(octokit.issues.listComments, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }) -} - -export async function fetchLinkedIssues(params: FetchParams, comments?: IssueComments) { - let issueComments: IssueComments | undefined = comments; - const linkedIssues: { - issueNumber: number; - repo: string; - }[] = []; - - if (!issueComments && !params) { - throw new Error("Either issueComments or params must be provided"); - } - - if (!issueComments) { - issueComments = await fetchIssueComments(params); - } - - const { context: { logger, payload: { repository: { owner: { login } } } } } = params - - if (!issueComments) { - logger.info("No comments found on issue"); - return linkedIssues - } - - for (const comment of issueComments) { - const linkedIssue = idIssueFromComment(login, comment.body); - if (linkedIssue) { - linkedIssues.push(linkedIssue); - } - } - - return await filterLinkedIssues(params, linkedIssues); -} - -async function filterLinkedIssues(params: FetchParams, linkedIssues: { issueNumber: number; repo: string; }[]) { - const { context: { logger, payload: { repository: { owner: { login } } } } } = params - - const contextIssues: { - issueNumber: number; - repo: string; - }[] = []; - - for (const issue of linkedIssues) { - if (issue && issue.issueNumber && issue.repo) { - if (await isRepoFromSameOrg(params.context, issue.repo, login)) { - contextIssues.push({ - issueNumber: issue.issueNumber, - repo: issue.repo - }); - } else { - logger.info(`Ignoring linked issue ${issue.issueNumber} from ${issue.repo} as it is not from the same org`); - } - } - } - - return contextIssues; -} - -function idIssueFromComment(owner: string, comment?: string) { - if (!comment) return - // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag - const urlMatch = comment.match(/https:\/\/github.com\/([^/]+)\/([^/]+)\/(pull|issue)\/(\d+)/); - - /** - * I think we should restrict including any linked context which is not of the same org. - * - * In most cases this will be the expected behaviour, I remember a scenario where - * I linked to an issue in a 3rd party org, for extra reviewer context but I also include the - * TL;DR which is always the case. We wouldn't want that full 3rd party PR review or issue to be - * included in the context. - */ - - const linkedIssue: { - issueNumber: number; - repo: string; - } = { - issueNumber: 0, - repo: "" - }; - - /** - * If following the rule that only issues from the same org should be included - * then we need to be sure that this format of linked issue is from the same org. 
- */ - - if (urlMatch && urlMatch[1] === owner) { - linkedIssue.issueNumber = parseInt(urlMatch[4]); - linkedIssue.repo = urlMatch[2]; - } - - return linkedIssue; -} - -async function isRepoFromSameOrg(context: Context, repo: string, owner: string) { - const { octokit } = context; - const { data } = await octokit.repos.get({ - owner, - repo - }); - - return data.owner.login === owner; -} \ No newline at end of file diff --git a/src/plugin.ts b/src/plugin.ts index 7e746d2..ee5dd76 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,6 +1,7 @@ import { Octokit } from "@octokit/rest"; -import { Env, PluginInputs } from "./types"; +import { Env, PluginInputs, SupportedEventsU } from "./types"; import { Context } from "./types"; +import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; /** @@ -35,14 +36,19 @@ export async function plugin(inputs: PluginInputs, env: Env) { adapters: {} as never, }; - if (context.eventName === "issue_comment.created") { - // do something + if (isSupportedEvent(context.eventName)) { const comment = context.payload.comment.body; + if (!comment.startsWith("/gpt")) { context.logger.info("Comment does not start with /gpt. Skipping."); return; } + if (context.payload.comment.user?.type === "Bot") { + context.logger.info("Comment is from a bot. Skipping."); + return; + } + const { isEnabled } = context.config; if (!isEnabled) { @@ -51,15 +57,23 @@ export async function plugin(inputs: PluginInputs, env: Env) { return; } + const response = await askQuestion(context, comment.slice(4).trim()); - - - - - - + if (response) { + const { answer, tokenUsage } = response + if (!answer) { + context.logger.error(`No answer from OpenAI`); + return; + } + context.logger.info(`Answer: ${answer}`, { tokenUsage }); + await addCommentToIssue(context, answer); + } } else { context.logger.error(`Unsupported event: ${context.eventName}`); } } + +function isSupportedEvent(eventName: string): eventName is SupportedEventsU { + return eventName === "issue_comment.created"; +} diff --git a/src/types/context.ts b/src/types/context.ts index 5869fd8..ec417f6 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -3,7 +3,7 @@ import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as Webhook import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; -export type SupportedEventsU = "issue_comment.created"; +export type SupportedEventsU = "issue_comment.created" export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; diff --git a/src/types/gpt.ts b/src/types/gpt.ts new file mode 100644 index 0000000..9d7bde7 --- /dev/null +++ b/src/types/gpt.ts @@ -0,0 +1,12 @@ +export type StreamlinedComment = { + user?: string; + body?: string; + id: number; +} + +export type StreamlinedComments = { + issue: number; + repo: string; + org: string; + comments: StreamlinedComment[]; +} \ No newline at end of file From 55d5b2a263d1d91c154e47297cedced4c104481b Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 02:40:47 +0100 Subject: [PATCH 09/72] feat: issue utils --- src/utils/issue.ts | 159 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 159 insertions(+) create mode 100644 src/utils/issue.ts diff --git a/src/utils/issue.ts b/src/utils/issue.ts new file mode 100644 index 0000000..0b827cc --- /dev/null +++ b/src/utils/issue.ts @@ -0,0 +1,159 @@ +import { Context } from "../types"; +import { Issue, IssueComments } from "../types/github"; + +type FetchParams = { + context: Context; + issueNum?: number; + owner?: string; + repo?: string; +}; + +/** + * Because in the eyes of the GitHub api Pull Requests are also + * issues, we can use the same functions for both. + */ + +export async function fetchIssue(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.issues.get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }).then(({ data }) => data as Issue); +} + +export async function fetchIssueComments(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.paginate(octokit.issues.listComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }).then((comments) => comments as IssueComments); +} + +export async function fetchLinkedIssues(params: FetchParams, comments?: IssueComments) { + let issueComments: IssueComments | undefined = comments; + const linkedIssues: { + issueNumber: number; + repo: string; + }[] = []; + + if (!issueComments && !params) { + throw new Error("Either issueComments or params must be provided"); + } + + if (!issueComments) { + issueComments = await fetchIssueComments(params); + } + + const { context: { logger, payload: { repository: { owner: { login } } } } } = params + + if (!issueComments) { + logger.info("No comments found on issue"); + return linkedIssues + } + + for (const comment of issueComments) { + const linkedIssue = idIssueFromComment(login, comment.body); + if (linkedIssue) { + linkedIssues.push(linkedIssue); + } + } + + return await filterLinkedIssues(params, linkedIssues); +} + +async function filterLinkedIssues(params: FetchParams, linkedIssues: { issueNumber: number; repo: string; }[]) { + const { context: { logger, payload: { repository: { owner: { login } } } } } = params + + const contextIssues: { + issueNumber: number; + repo: string; + }[] = []; + + for (const issue of linkedIssues) { + if (issue && issue.issueNumber && issue.repo) { + if (await isRepoFromSameOrg(params.context, issue.repo, login)) { + contextIssues.push({ + issueNumber: issue.issueNumber, + repo: issue.repo + }); + } else { + logger.info(`Ignoring linked issue ${issue.issueNumber} from ${issue.repo} as it is not from the same org`); + } + } + } + + return contextIssues; 
+} + +export function idIssueFromComment(owner?: string, comment?: string | null) { + if (!comment) { + return null; + } + if (!owner) { + throw new Error("Owner must be provided when parsing linked issues"); + } + // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag + const urlMatch = comment.match(/https:\/\/github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); + + /** + * I think we should restrict including any linked context which is not of the same org. + * + * In most cases this will be the expected behaviour, I remember a scenario where + * I linked to an issue in a 3rd party org, for extra reviewer context but I also include the + * TL;DR which is always the case. We wouldn't want that full 3rd party PR review or issue to be + * included in the context. + */ + + const linkedIssue: { + issueNumber: number; + repo: string; + } = { + issueNumber: 0, + repo: "" + }; + + /** + * If following the rule that only issues from the same org should be included + * then we need to be sure that this format of linked issue is from the same org. + */ + + if (urlMatch && urlMatch[1] === owner) { + linkedIssue.issueNumber = parseInt(urlMatch[4]); + linkedIssue.repo = urlMatch[2]; + } + + return linkedIssue; +} + +async function isRepoFromSameOrg(context: Context, repo: string, owner: string) { + const { octokit } = context; + const { data } = await octokit.repos.get({ + owner, + repo + }); + + return data.owner.login === owner; +} + + +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: string) { + const { logger, octokit } = context; + + try { + const diff = await octokit.pulls.get({ + owner: org, + repo, + pull_number: parseInt(issue) + }); + return diff.data; + } catch (error) { + logger.error(`Error fetching pull request diff: ${error}`); + return null; + } +} \ No newline at end of file From bd790b5f5c4e58eecaa656896aeabe2ce2667471 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 03:26:12 +0100 Subject: [PATCH 10/72] feat: chat ready --- src/handlers/ask-gpt.ts | 216 ++++++++++++++++++++++++++++--- src/plugin.ts | 23 ++-- src/utils/format-chat-history.ts | 101 +++++++++++++++ src/utils/issue.ts | 19 +++ 4 files changed, 334 insertions(+), 25 deletions(-) create mode 100644 src/utils/format-chat-history.ts diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 8e942c2..1d2f709 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,29 +1,215 @@ import OpenAI from "openai"; -import { ChatCompletionMessageParam } from "openai/resources"; -import { addCommentToIssue } from "./add-comment"; import { Context } from "../types"; +import { fetchIssue, fetchIssueComments, getLinkedIssueContextFromComments, idIssueFromComment } from "../utils/issue"; +import { IssueComments } from "../types/github"; +import { StreamlinedComment } from "../types/gpt"; +import { createChatHistory, formatChatHistory } from "../utils/format-chat-history"; +import { addCommentToIssue } from "./add-comment"; + +export async function askQuestion(context: Context, question: string) { + const { logger, payload: { issue: currentIssue } } = context; + + if (!question) { + logger.error(`No question provided`); + await addCommentToIssue(context, "No question provided", true, "error"); + return; + } + + const { body: issueSpecOrPullBody, repository_url } = currentIssue; + const org = repository_url.split("/")[4]; -export async function askGPT(context: 
Context, chatHistory: ChatCompletionMessageParam[]) { const { - logger, - config: { - openAi_apiKey: openAi, - }, - } = context; + specReferencedIssueBody, + specReferencedIssueKey, + streamlinedSpecReferencedIssueComments, + } = await getSpecReferencedContext(context, org, issueSpecOrPullBody); + + const issueComments = await fetchIssueComments({ context }); + const linkedIssueContext = await getLinkedIssueContextFromComments(context, issueComments); + const { linkedIssues, linkedIssueComments } = linkedIssueContext; + + // we are only going one level deep with the linked issue context fetching + for (const issue of linkedIssues) { + console.log(`Fetching linked issue ${issue.issueNumber}`) + const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }) + linkedIssueComments.push(...fetched); + } + + const streamlinedComments = await getAllStreamlinedComments(issueComments, streamlinedSpecReferencedIssueComments, linkedIssueComments); + const { linkedPulls, specAndBodies } = await getSpecBodiesAndLinkedPulls(context, repository_url, currentIssue.number, issueSpecOrPullBody, specReferencedIssueBody, specReferencedIssueKey, linkedIssues); + const formattedChat = formatChatHistory(context, streamlinedComments, specAndBodies, linkedPulls); + + return await askGPT(context, formattedChat); +} + +async function getAllStreamlinedComments( + issueComments: IssueComments, + streamlinedSpecReferencedIssueComments: Record | undefined, + linkedIssueComments: IssueComments +) { + const streamlinedComments = streamlineComments(issueComments) ?? {}; + + if (streamlinedSpecReferencedIssueComments && + Object.keys(streamlinedSpecReferencedIssueComments).length > 0) { + for (const [key, value] of Object.entries(streamlinedSpecReferencedIssueComments)) { + if (!streamlinedComments[key]) { + streamlinedComments[key] = value; + continue; + } + + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; + } + } + + if (linkedIssueComments.length > 0) { + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); + + if (linkedStreamlinedComments) { + for (const [key, value] of Object.entries(linkedStreamlinedComments)) { + if (!streamlinedComments[key]) { + streamlinedComments[key] = value; + continue; + } + + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; + } + } + } + + return streamlinedComments; +} + +async function getSpecBodiesAndLinkedPulls( + context: Context, + currentIssueUrl: string, + currentIssueNumber: number, + issueSpecOrPullBody: string | null, + specReferencedIssueBody: string | null | undefined, + specReferencedIssueKey: string | null | undefined, + linkedIssues: { issueNumber: number; repo: string; }[] +) { + const linkedPulls: Record = {}; + const currentIssueKey = createKey(currentIssueUrl, currentIssueNumber); + // collect specifically all of the spec and PR bodies + const specAndBodies: Record = {}; + specAndBodies[currentIssueKey] = issueSpecOrPullBody || ""; + specAndBodies[specReferencedIssueKey as string] = specReferencedIssueBody || ""; + + for (const linkedIssue of linkedIssues) { + const issue = await fetchIssue({ context, issueNum: linkedIssue.issueNumber, repo: linkedIssue.repo }); + const { body, repository_url, pull_request } = issue; + const linkedIssueKey = createKey(repository_url, linkedIssue.issueNumber); + specAndBodies[linkedIssueKey] = body || ""; + + if (pull_request) { + linkedPulls[linkedIssueKey] = true + } + } 
+ + return { specAndBodies, linkedPulls }; +} + +async function getSpecReferencedContext( + context: Context, + org: string, + issueSpecOrPullBody: string | null, +) { + // fetch the spec referenced issue if it exists + const specReferencedIssueId = idIssueFromComment(org, issueSpecOrPullBody); + let specReferencedIssue, + specReferencedIssueBody, + specReferencedIssueRepoUrl, + specReferencedIssueComments, + specReferencedIssueKey, + streamlinedSpecReferencedIssueComments; + + if (specReferencedIssueId) { + specReferencedIssue = await fetchIssue({ context, issueNum: specReferencedIssueId.issueNumber }); + specReferencedIssueBody = specReferencedIssue.body; + specReferencedIssueRepoUrl = specReferencedIssue.repository_url; + specReferencedIssueComments = await fetchIssueComments({ context, issueNum: specReferencedIssueId.issueNumber, repo: specReferencedIssueId.repo }) + specReferencedIssueKey = createKey(specReferencedIssueRepoUrl, specReferencedIssueId?.issueNumber); + streamlinedSpecReferencedIssueComments = streamlineComments(specReferencedIssueComments) ?? {}; + } + + return { + specReferencedIssue, + specReferencedIssueBody, + specReferencedIssueRepoUrl, + specReferencedIssueComments, + specReferencedIssueKey, + streamlinedSpecReferencedIssueComments, + }; +} + +function createKey(issueUrl: string, issue?: number) { + const splitUrl = issueUrl?.split("/"); + const issueNumber = issue || parseInt(splitUrl?.pop() || ""); + const issueRepo = splitUrl?.slice(-2).join("/"); + let issueOrg = splitUrl?.slice(-3, -2).join("/"); + + if (issueOrg.startsWith("repos")) { + return `${issueRepo}/issues/${issueNumber}`; + } + + return `${issueOrg}/${issueRepo}/${issueNumber}`; +} + +function streamlineComments(comments: IssueComments) { + const streamlined: Record = {}; + + for (const comment of comments) { + const user = comment.user?.login; + if ( + user === "ubiquibot" || + user === "ubiquibot[bot]" || + user === "ubiquibot-v2-testing" || + user === "ubiquibot-dev[bot]" || + user === "ubqbot[bot]" || // TODO: remove this + user === "github-actions[bot]" + ) { + const isPreviousAnswer = comment.body?.includes(""); + if (!isPreviousAnswer) continue; + } + + const body = comment.body; + const key = createKey(comment.issue_url); - if (!openAi) { + if (!streamlined[key]) { + streamlined[key] = []; + } + + if (user && body) { + streamlined[key].push({ + user, + body, + id: comment.id, + }); + } + } + return streamlined; +} + +export async function askGPT(context: Context, formattedChat: string) { + const { logger, config: { openAi_apiKey } } = context; + + if (!openAi_apiKey) { logger.error(`No OpenAI API Key provided`); await addCommentToIssue(context, "No OpenAI API Key detected!", true, "error"); // TOO confirm correct style here return; } - const openAI = new OpenAI({ - apiKey: openAi, - }); + const openAI = new OpenAI({ apiKey: openAi_apiKey }); + + const chat = createChatHistory(formattedChat); + + logger.info(`Sending chat to OpenAI`, { chat }); const res: OpenAI.Chat.Completions.ChatCompletion = await openAI.chat.completions.create({ - messages: chatHistory, - model: "gpt-4o", + messages: createChatHistory(formattedChat), + model: "gpt-4o", // "gpt-4o temperature: 0, }); @@ -42,4 +228,4 @@ export async function askGPT(context: Context, chatHistory: ChatCompletionMessag }; return { answer, tokenUsage }; -} +} \ No newline at end of file diff --git a/src/plugin.ts b/src/plugin.ts index ee5dd76..90fa2a3 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -35,42 +35,45 @@ export async function 
plugin(inputs: PluginInputs, env: Env) {
     },
     adapters: {} as never,
   };
+  const { logger, config: { isEnabled } } = context;

   if (isSupportedEvent(context.eventName)) {
     const comment = context.payload.comment.body;

     if (!comment.startsWith("/gpt")) {
-      context.logger.info("Comment does not start with /gpt. Skipping.");
+      logger.info("Comment does not start with /gpt. Skipping.");
       return;
     }

     if (context.payload.comment.user?.type === "Bot") {
-      context.logger.info("Comment is from a bot. Skipping.");
+      logger.info("Comment is from a bot. Skipping.");
       return;
     }

-    const { isEnabled } = context.config;
-
     if (!isEnabled) {
-      context.logger.info("Plugin is disabled. Skipping.");
+      logger.info("Plugin is disabled. Skipping.");
       await addCommentToIssue(context, "The /gpt command is disabled. Enable it in the plugin settings.", true, "warning");
       return;
     }

-    const response = await askQuestion(context, comment.slice(4).trim());
+    const question = comment.slice(4).trim();
+
+    logger.info(`Asking question: ${question}`);
+    const response = await askQuestion(context, question);

     if (response) {
       const { answer, tokenUsage } = response
       if (!answer) {
-        context.logger.error(`No answer from OpenAI`);
+        logger.error(`No answer from OpenAI`);
         return;
       }
-      context.logger.info(`Answer: ${answer}`, { tokenUsage });
+      logger.info(`Answer: ${answer}`, { tokenUsage });
       await addCommentToIssue(context, answer);
+    } else {
+      logger.error(`No response from OpenAI`);
     }
-  } else {
-    context.logger.error(`Unsupported event: ${context.eventName}`);
+    logger.error(`Unsupported event: ${context.eventName}`);
   }
 }
diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts
new file mode 100644
index 0000000..faffc1b
--- /dev/null
+++ b/src/utils/format-chat-history.ts
@@ -0,0 +1,101 @@
+import { ChatCompletionMessageParam } from "openai/resources";
+import { Context } from "../types";
+import { StreamlinedComment, StreamlinedComments } from "../types/gpt";
+import { fetchPullRequestDiff } from "./issue";
+
+export function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record, linkedPulls: Record) {
+  const convoKeys = Object.keys(streamlined);
+  const specAndBodyKeys = Object.keys(specAndBodies);
+
+  const curIssue = {
+    convo: streamlined[convoKeys[0]],
+    specOrBody: specAndBodies[specAndBodyKeys[0]],
+  }
+
+  let issueSpecBlock = "";
+  issueSpecBlock += createHeader("Project Specification", specAndBodyKeys[0]);
+  issueSpecBlock += createSpecOrBody(curIssue.specOrBody);
+  issueSpecBlock += createFooter("Project Specification");
+
+  let issueCommentBlock = "";
+  issueCommentBlock += createHeader("Issue Conversation", convoKeys[0]);
+  issueCommentBlock += createComment({ issue: parseInt(convoKeys[0].split("/")[2]), repo: convoKeys[0].split("/")[1], org: convoKeys[0].split("/")[0], comments: curIssue.convo });
+  issueCommentBlock += createFooter("Issue Conversation");
+
+  delete convoKeys[0];
+
+  const linkedContextBlocks = convoKeys.map((key) => {
+    const comments = streamlined[key];
+    const [org, repo, issues, issue] = key.split("/");
+    const isPull = linkedPulls[key];
+    const specHeader = isPull ? `Linked Pull #${issue} Request Body` : `Linked Issue #${issue} Specification`;
+
+    const specOrBody = specAndBodies[key];
+    let specOrBodyBlock = createHeader(specHeader, key);
+    specOrBodyBlock += createSpecOrBody(specOrBody);
+    specOrBodyBlock += createFooter(specHeader);
+
+    const header = isPull ? `Linked Pull #${issue} Request Conversation` : `Linked Issue #${issue} Conversation`;
+    const repoString = `${org}/${repo} #${issue}`;
+    const diff = isPull ? fetchPullRequestDiff(context, org, repo, issue) : null;
+
+    let block = ""
+    block += specOrBodyBlock;
+    block += createHeader(header, repoString);
+    block += createComment({ issue: parseInt(issue), repo, org, comments });
+    block += createFooter(header);
+
+    if (!isPull) {
+      return block;
+    }
+
+    let diffBlock = createHeader("Linked Pull Request Code Diff", repoString);
+    diffBlock += diff ? diff : "No diff available";
+    diffBlock += createFooter("Linked Pull Request Code Diff");
+    return block + diffBlock;
+  });
+
+
+  return issueSpecBlock + issueCommentBlock + linkedContextBlocks.join("");
+}
+
+function createHeader(content: string, repoString: string) {
+  return `=== ${content} === ${repoString} ===\n\n`
+}
+
+function createFooter(content: string) {
+  return `=== End ${content} ===\n\n`
+}
+
+function createComment(comment: StreamlinedComments) {
+  let comments = "";
+  for (const c of comment.comments) {
+    comments += `${c.id} ${c.user}: ${c.body}\n`;
+  }
+  return comments;
+}
+
+function createSpecOrBody(specOrBody: string) {
+  return `${specOrBody}\n`
+}
+
+export function createChatHistory(formattedChat: string) {
+  const chatHistory: ChatCompletionMessageParam[] = [];
+
+  const systemMessage: ChatCompletionMessageParam = {
+    role: "system",
+
+    content: `Using the provided context, address the question being asked and make sure to provide a clear and concise answer with no follow-up statements.
+    The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked.
+    Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`
+  };
+
+  const userMessage: ChatCompletionMessageParam = {
+    role: "user",
+    content: formattedChat,
+  };
+
+  chatHistory.push(systemMessage, userMessage);
+
+  return chatHistory;
+}
\ No newline at end of file
diff --git a/src/utils/issue.ts b/src/utils/issue.ts
index 0b827cc..c690f5b 100644
--- a/src/utils/issue.ts
+++ b/src/utils/issue.ts
@@ -91,6 +91,25 @@ async function filterLinkedIssues(params: FetchParams, linkedIssues: { issueNumb
   return contextIssues;
 }

+export async function getLinkedIssueContextFromComments(context: Context, issueComments: IssueComments) {
+  // find any linked issues in comments by parsing the comments and enforcing that the
+  // linked issue is from the same org that the current issue is from
+  const linkedIssues = await fetchLinkedIssues({ context }, issueComments);
+
+  // the conversational history of the linked issues
+  const linkedIssueComments: IssueComments = [];
+
+  // we are only going one level deep with the linked issue context fetching
+  for (const issue of linkedIssues) {
+    console.log(`Fetching linked issue ${issue.issueNumber}`)
+    const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo })
+    linkedIssueComments.push(...fetched);
+  }
+
+  return { linkedIssues, linkedIssueComments };
+}
+
+
 export function idIssueFromComment(owner?: string, comment?: string | null) {
   if (!comment) {
     return null;

From 6063f36e4bb785185265a504faf4ff64efff4957 Mon Sep 17 00:00:00 2001
From: Keyrxng <106303466+Keyrxng@users.noreply.github.com>
Date: Sat, 13 Jul 2024 03:35:58 +0100
Subject: [PATCH 11/72] fix: cspell, eslint

---
 .cspell.json | 19 +-
 package.json | 2 +-
 src/handlers/add-comment.ts | 38 ++--
 src/handlers/ask-gpt.ts
| 371 ++++++++++++++++--------------- src/plugin.ts | 7 +- src/types/context.ts | 4 +- src/types/gpt.ts | 18 +- src/utils/format-chat-history.ts | 154 +++++++------ src/utils/issue.ts | 305 +++++++++++++------------ 9 files changed, 490 insertions(+), 428 deletions(-) diff --git a/.cspell.json b/.cspell.json index 213394b..b5f9628 100644 --- a/.cspell.json +++ b/.cspell.json @@ -4,7 +4,24 @@ "ignorePaths": ["**/*.json", "**/*.css", "node_modules", "**/*.log", "./src/adapters/supabase/**/**.ts"], "useGitignore": true, "language": "en", - "words": ["Nektos", "dataurl", "devpool", "outdir", "servedir", "Supabase", "SUPABASE", "typebox", "ubiquibot", "Smee"], + "words": [ + "Nektos", + "dataurl", + "devpool", + "outdir", + "servedir", + "Supabase", + "SUPABASE", + "typebox", + "ubiquibot", + "Smee", + "sonarjs", + "knip", + "mischeck", + "convo", + "ubqbot", + "behaviour" + ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], "ignoreRegExpList": ["[0-9a-fA-F]{6}"] diff --git a/package.json b/package.json index 7eccaea..be1396f 100644 --- a/package.json +++ b/package.json @@ -81,4 +81,4 @@ ] }, "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610" -} \ No newline at end of file +} diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index fe8d732..2417212 100644 --- a/src/handlers/add-comment.ts +++ b/src/handlers/add-comment.ts @@ -1,28 +1,28 @@ import { Context } from "../types/context"; const diffStyles = { - warning: "```diff\n! ", - error: "```diff\n- ", - success: "```diff\n+ ", - info: "```diff\n# ", + warning: "```diff\n! ", + error: "```diff\n- ", + success: "```diff\n+ ", + info: "```diff\n# ", }; export async function addCommentToIssue(context: Context, message: string, diff = false, diffStyle?: keyof typeof diffStyles) { - const { payload } = context; - const issueNumber = payload.issue.number; + const { payload } = context; + const issueNumber = payload.issue.number; - if (diff && diffStyle) { - message = `${diffStyles[diffStyle]}${message}\n\`\`\``; - } + if (diff && diffStyle) { + message = `${diffStyles[diffStyle]}${message}\n\`\`\``; + } - try { - await context.octokit.issues.createComment({ - owner: payload.repository.owner.login, - repo: payload.repository.name, - issue_number: issueNumber, - body: message, - }); - } catch (e: unknown) { - context.logger.fatal("Adding a comment failed!", e); - } + try { + await context.octokit.issues.createComment({ + owner: payload.repository.owner.login, + repo: payload.repository.name, + issue_number: issueNumber, + body: message, + }); + } catch (e: unknown) { + context.logger.fatal("Adding a comment failed!", e); + } } diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 1d2f709..d42cf8f 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -7,225 +7,234 @@ import { createChatHistory, formatChatHistory } from "../utils/format-chat-histo import { addCommentToIssue } from "./add-comment"; export async function askQuestion(context: Context, question: string) { - const { logger, payload: { issue: currentIssue } } = context; - - if (!question) { - logger.error(`No question provided`); - await addCommentToIssue(context, "No question provided", true, "error"); - return; - } - - const { body: issueSpecOrPullBody, repository_url } = currentIssue; - const org = repository_url.split("/")[4]; - - const { - specReferencedIssueBody, - 
specReferencedIssueKey, - streamlinedSpecReferencedIssueComments, - } = await getSpecReferencedContext(context, org, issueSpecOrPullBody); - - const issueComments = await fetchIssueComments({ context }); - const linkedIssueContext = await getLinkedIssueContextFromComments(context, issueComments); - const { linkedIssues, linkedIssueComments } = linkedIssueContext; - - // we are only going one level deep with the linked issue context fetching - for (const issue of linkedIssues) { - console.log(`Fetching linked issue ${issue.issueNumber}`) - const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }) - linkedIssueComments.push(...fetched); - } - - const streamlinedComments = await getAllStreamlinedComments(issueComments, streamlinedSpecReferencedIssueComments, linkedIssueComments); - const { linkedPulls, specAndBodies } = await getSpecBodiesAndLinkedPulls(context, repository_url, currentIssue.number, issueSpecOrPullBody, specReferencedIssueBody, specReferencedIssueKey, linkedIssues); - const formattedChat = formatChatHistory(context, streamlinedComments, specAndBodies, linkedPulls); - - return await askGPT(context, formattedChat); + const { + logger, + payload: { issue: currentIssue }, + } = context; + + if (!question) { + logger.error(`No question provided`); + await addCommentToIssue(context, "No question provided", true, "error"); + return; + } + + const { body: issueSpecOrPullBody, repository_url } = currentIssue; + const org = repository_url.split("/")[4]; + + const { specReferencedIssueBody, specReferencedIssueKey, streamlinedSpecReferencedIssueComments } = await getSpecReferencedContext( + context, + org, + issueSpecOrPullBody + ); + + const issueComments = await fetchIssueComments({ context }); + const linkedIssueContext = await getLinkedIssueContextFromComments(context, issueComments); + const { linkedIssues, linkedIssueComments } = linkedIssueContext; + + // we are only going one level deep with the linked issue context fetching + for (const issue of linkedIssues) { + console.log(`Fetching linked issue ${issue.issueNumber}`); + const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }); + linkedIssueComments.push(...fetched); + } + + const streamlinedComments = await getAllStreamlinedComments(issueComments, streamlinedSpecReferencedIssueComments, linkedIssueComments); + const { linkedPulls, specAndBodies } = await getSpecBodiesAndLinkedPulls( + context, + repository_url, + currentIssue.number, + issueSpecOrPullBody, + specReferencedIssueBody, + specReferencedIssueKey, + linkedIssues + ); + const formattedChat = formatChatHistory(context, streamlinedComments, specAndBodies, linkedPulls); + + return await askGpt(context, formattedChat); } async function getAllStreamlinedComments( - issueComments: IssueComments, - streamlinedSpecReferencedIssueComments: Record | undefined, - linkedIssueComments: IssueComments + issueComments: IssueComments, + streamlinedSpecReferencedIssueComments: Record | undefined, + linkedIssueComments: IssueComments ) { - const streamlinedComments = streamlineComments(issueComments) ?? 
{}; - - if (streamlinedSpecReferencedIssueComments && - Object.keys(streamlinedSpecReferencedIssueComments).length > 0) { - for (const [key, value] of Object.entries(streamlinedSpecReferencedIssueComments)) { - if (!streamlinedComments[key]) { - streamlinedComments[key] = value; - continue; - } - - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; - } - } + const streamlinedComments = streamlineComments(issueComments) ?? {}; - if (linkedIssueComments.length > 0) { - const linkedStreamlinedComments = streamlineComments(linkedIssueComments); + if (streamlinedSpecReferencedIssueComments && Object.keys(streamlinedSpecReferencedIssueComments).length > 0) { + for (const [key, value] of Object.entries(streamlinedSpecReferencedIssueComments)) { + if (!streamlinedComments[key]) { + streamlinedComments[key] = value; + continue; + } - if (linkedStreamlinedComments) { - for (const [key, value] of Object.entries(linkedStreamlinedComments)) { - if (!streamlinedComments[key]) { - streamlinedComments[key] = value; - continue; - } + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; + } + } + + if (linkedIssueComments.length > 0) { + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; - } + if (linkedStreamlinedComments) { + for (const [key, value] of Object.entries(linkedStreamlinedComments)) { + if (!streamlinedComments[key]) { + streamlinedComments[key] = value; + continue; } + + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; + } } + } - return streamlinedComments; + return streamlinedComments; } async function getSpecBodiesAndLinkedPulls( - context: Context, - currentIssueUrl: string, - currentIssueNumber: number, - issueSpecOrPullBody: string | null, - specReferencedIssueBody: string | null | undefined, - specReferencedIssueKey: string | null | undefined, - linkedIssues: { issueNumber: number; repo: string; }[] + context: Context, + currentIssueUrl: string, + currentIssueNumber: number, + issueSpecOrPullBody: string | null, + specReferencedIssueBody: string | null | undefined, + specReferencedIssueKey: string | null | undefined, + linkedIssues: { issueNumber: number; repo: string }[] ) { - const linkedPulls: Record = {}; - const currentIssueKey = createKey(currentIssueUrl, currentIssueNumber); - // collect specifically all of the spec and PR bodies - const specAndBodies: Record = {}; - specAndBodies[currentIssueKey] = issueSpecOrPullBody || ""; - specAndBodies[specReferencedIssueKey as string] = specReferencedIssueBody || ""; - - for (const linkedIssue of linkedIssues) { - const issue = await fetchIssue({ context, issueNum: linkedIssue.issueNumber, repo: linkedIssue.repo }); - const { body, repository_url, pull_request } = issue; - const linkedIssueKey = createKey(repository_url, linkedIssue.issueNumber); - specAndBodies[linkedIssueKey] = body || ""; - - if (pull_request) { - linkedPulls[linkedIssueKey] = true - } + const linkedPulls: Record = {}; + const currentIssueKey = createKey(currentIssueUrl, currentIssueNumber); + // collect specifically all of the spec and PR bodies + const specAndBodies: Record = {}; + specAndBodies[currentIssueKey] = issueSpecOrPullBody || ""; + specAndBodies[specReferencedIssueKey as string] = specReferencedIssueBody || ""; + + for (const linkedIssue of linkedIssues) { + const issue = 
await fetchIssue({ context, issueNum: linkedIssue.issueNumber, repo: linkedIssue.repo }); + const { body, repository_url, pull_request } = issue; + const linkedIssueKey = createKey(repository_url, linkedIssue.issueNumber); + specAndBodies[linkedIssueKey] = body || ""; + + if (pull_request) { + linkedPulls[linkedIssueKey] = true; } + } - return { specAndBodies, linkedPulls }; + return { specAndBodies, linkedPulls }; } -async function getSpecReferencedContext( - context: Context, - org: string, - issueSpecOrPullBody: string | null, -) { - // fetch the spec referenced issue if it exists - const specReferencedIssueId = idIssueFromComment(org, issueSpecOrPullBody); - let specReferencedIssue, - specReferencedIssueBody, - specReferencedIssueRepoUrl, - specReferencedIssueComments, - specReferencedIssueKey, - streamlinedSpecReferencedIssueComments; - - if (specReferencedIssueId) { - specReferencedIssue = await fetchIssue({ context, issueNum: specReferencedIssueId.issueNumber }); - specReferencedIssueBody = specReferencedIssue.body; - specReferencedIssueRepoUrl = specReferencedIssue.repository_url; - specReferencedIssueComments = await fetchIssueComments({ context, issueNum: specReferencedIssueId.issueNumber, repo: specReferencedIssueId.repo }) - specReferencedIssueKey = createKey(specReferencedIssueRepoUrl, specReferencedIssueId?.issueNumber); - streamlinedSpecReferencedIssueComments = streamlineComments(specReferencedIssueComments) ?? {}; - } - - return { - specReferencedIssue, - specReferencedIssueBody, - specReferencedIssueRepoUrl, - specReferencedIssueComments, - specReferencedIssueKey, - streamlinedSpecReferencedIssueComments, - }; +async function getSpecReferencedContext(context: Context, org: string, issueSpecOrPullBody: string | null) { + // fetch the spec referenced issue if it exists + const specReferencedIssueId = idIssueFromComment(org, issueSpecOrPullBody); + let specReferencedIssue, + specReferencedIssueBody, + specReferencedIssueRepoUrl, + specReferencedIssueComments, + specReferencedIssueKey, + streamlinedSpecReferencedIssueComments; + + if (specReferencedIssueId) { + specReferencedIssue = await fetchIssue({ context, issueNum: specReferencedIssueId.issueNumber }); + specReferencedIssueBody = specReferencedIssue.body; + specReferencedIssueRepoUrl = specReferencedIssue.repository_url; + specReferencedIssueComments = await fetchIssueComments({ context, issueNum: specReferencedIssueId.issueNumber, repo: specReferencedIssueId.repo }); + specReferencedIssueKey = createKey(specReferencedIssueRepoUrl, specReferencedIssueId?.issueNumber); + streamlinedSpecReferencedIssueComments = streamlineComments(specReferencedIssueComments) ?? 
{}; + } + + return { + specReferencedIssue, + specReferencedIssueBody, + specReferencedIssueRepoUrl, + specReferencedIssueComments, + specReferencedIssueKey, + streamlinedSpecReferencedIssueComments, + }; } function createKey(issueUrl: string, issue?: number) { - const splitUrl = issueUrl?.split("/"); - const issueNumber = issue || parseInt(splitUrl?.pop() || ""); - const issueRepo = splitUrl?.slice(-2).join("/"); - let issueOrg = splitUrl?.slice(-3, -2).join("/"); + const splitUrl = issueUrl?.split("/"); + const issueNumber = issue || parseInt(splitUrl?.pop() || ""); + const issueRepo = splitUrl?.slice(-2).join("/"); + const issueOrg = splitUrl?.slice(-3, -2).join("/"); - if (issueOrg.startsWith("repos")) { - return `${issueRepo}/issues/${issueNumber}`; - } + if (issueOrg.startsWith("repos")) { + return `${issueRepo}/issues/${issueNumber}`; + } - return `${issueOrg}/${issueRepo}/${issueNumber}`; + return `${issueOrg}/${issueRepo}/${issueNumber}`; } function streamlineComments(comments: IssueComments) { - const streamlined: Record = {}; - - for (const comment of comments) { - const user = comment.user?.login; - if ( - user === "ubiquibot" || - user === "ubiquibot[bot]" || - user === "ubiquibot-v2-testing" || - user === "ubiquibot-dev[bot]" || - user === "ubqbot[bot]" || // TODO: remove this - user === "github-actions[bot]" - ) { - const isPreviousAnswer = comment.body?.includes(""); - if (!isPreviousAnswer) continue; - } + const streamlined: Record = {}; + + for (const comment of comments) { + const user = comment.user?.login; + if ( + user === "ubiquibot" || + user === "ubiquibot[bot]" || + user === "ubiquibot-v2-testing" || + user === "ubiquibot-dev[bot]" || + user === "ubqbot[bot]" || // TODO: remove this + user === "github-actions[bot]" + ) { + const isPreviousAnswer = comment.body?.includes(""); + if (!isPreviousAnswer) continue; + } - const body = comment.body; - const key = createKey(comment.issue_url); + const body = comment.body; + const key = createKey(comment.issue_url); - if (!streamlined[key]) { - streamlined[key] = []; - } + if (!streamlined[key]) { + streamlined[key] = []; + } - if (user && body) { - streamlined[key].push({ - user, - body, - id: comment.id, - }); - } + if (user && body) { + streamlined[key].push({ + user, + body, + id: comment.id, + }); } - return streamlined; + } + return streamlined; } -export async function askGPT(context: Context, formattedChat: string) { - const { logger, config: { openAi_apiKey } } = context; +export async function askGpt(context: Context, formattedChat: string) { + const { + logger, + config: { openAi_apiKey }, + } = context; - if (!openAi_apiKey) { - logger.error(`No OpenAI API Key provided`); - await addCommentToIssue(context, "No OpenAI API Key detected!", true, "error"); // TOO confirm correct style here - return; - } + if (!openAi_apiKey) { + logger.error(`No OpenAI API Key provided`); + await addCommentToIssue(context, "No OpenAI API Key detected!", true, "error"); // TOO confirm correct style here + return; + } - const openAI = new OpenAI({ apiKey: openAi_apiKey }); + const openAi = new OpenAI({ apiKey: openAi_apiKey }); - const chat = createChatHistory(formattedChat); + const chat = createChatHistory(formattedChat); - logger.info(`Sending chat to OpenAI`, { chat }); + logger.info(`Sending chat to OpenAI`, { chat }); - const res: OpenAI.Chat.Completions.ChatCompletion = await openAI.chat.completions.create({ - messages: createChatHistory(formattedChat), - model: "gpt-4o", // "gpt-4o - temperature: 0, - }); + const res: 
OpenAI.Chat.Completions.ChatCompletion = await openAi.chat.completions.create({ + messages: createChatHistory(formattedChat), + model: "gpt-4o", // "gpt-4o + temperature: 0, + }); - if (!res.choices) { - logger.error(`No response from OpenAI`); - await addCommentToIssue(context, "No response from OpenAI", true, "error"); - return; - } + if (!res.choices) { + logger.error(`No response from OpenAI`); + await addCommentToIssue(context, "No response from OpenAI", true, "error"); + return; + } - const answer = res.choices[0].message.content; + const answer = res.choices[0].message.content; - const tokenUsage = { - output: res.usage?.completion_tokens, - input: res.usage?.prompt_tokens, - total: res.usage?.total_tokens, - }; + const tokenUsage = { + output: res.usage?.completion_tokens, + input: res.usage?.prompt_tokens, + total: res.usage?.total_tokens, + }; - return { answer, tokenUsage }; -} \ No newline at end of file + return { answer, tokenUsage }; +} diff --git a/src/plugin.ts b/src/plugin.ts index 90fa2a3..80f8359 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -35,7 +35,10 @@ export async function plugin(inputs: PluginInputs, env: Env) { }, adapters: {} as never, }; - const { logger, config: { isEnabled } } = context; + const { + logger, + config: { isEnabled }, + } = context; if (isSupportedEvent(context.eventName)) { const comment = context.payload.comment.body; @@ -62,7 +65,7 @@ export async function plugin(inputs: PluginInputs, env: Env) { const response = await askQuestion(context, question); if (response) { - const { answer, tokenUsage } = response + const { answer, tokenUsage } = response; if (!answer) { logger.error(`No answer from OpenAI`); return; diff --git a/src/types/context.ts b/src/types/context.ts index ec417f6..0b696a0 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -3,7 +3,7 @@ import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as Webhook import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; -export type SupportedEventsU = "issue_comment.created" +export type SupportedEventsU = "issue_comment.created"; export type SupportedEvents = { [K in SupportedEventsU]: K extends WebhookEventName ? 
WebhookEvent : never; @@ -13,7 +13,7 @@ export interface Context; - adapters: never + adapters: never; config: PluginSettings; env: Env; logger: { diff --git a/src/types/gpt.ts b/src/types/gpt.ts index 9d7bde7..c84e649 100644 --- a/src/types/gpt.ts +++ b/src/types/gpt.ts @@ -1,12 +1,12 @@ export type StreamlinedComment = { - user?: string; - body?: string; - id: number; -} + user?: string; + body?: string; + id: number; +}; export type StreamlinedComments = { - issue: number; - repo: string; - org: string; - comments: StreamlinedComment[]; -} \ No newline at end of file + issue: number; + repo: string; + org: string; + comments: StreamlinedComment[]; +}; diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index faffc1b..40ffa90 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -3,99 +3,109 @@ import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; import { fetchPullRequestDiff } from "./issue"; -export function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record, linkedPulls: Record) { - const convoKeys = Object.keys(streamlined); - const specAndBodyKeys = Object.keys(specAndBodies); - - const curIssue = { - convo: streamlined[convoKeys[0]], - specOrBody: specAndBodies[specAndBodyKeys[0]], +export function formatChatHistory( + context: Context, + streamlined: Record, + specAndBodies: Record, + linkedPulls: Record +) { + const convoKeys = Object.keys(streamlined); + const specAndBodyKeys = Object.keys(specAndBodies); + + const curIssue = { + convo: streamlined[convoKeys[0]], + specOrBody: specAndBodies[specAndBodyKeys[0]], + }; + + let issueSpecBlock = ""; + issueSpecBlock += createHeader("Project Specification", specAndBodyKeys[0]); + issueSpecBlock += createSpecOrBody(curIssue.specOrBody); + issueSpecBlock += createFooter("Project Specification"); + + let issueCommentBlock = ""; + issueCommentBlock += createHeader("Issue Conversation", convoKeys[0]); + issueCommentBlock += createComment({ + issue: parseInt(convoKeys[0].split("/")[2]), + repo: convoKeys[0].split("/")[1], + org: convoKeys[0].split("/")[0], + comments: curIssue.convo, + }); + issueCommentBlock += createFooter("Issue Conversation"); + + delete convoKeys[0]; + + const linkedContextBlocks = convoKeys.map(async (key) => { + const comments = streamlined[key]; + const [org, repo, _issues, issue] = key.split("/"); + const isPull = linkedPulls[key]; + const specHeader = isPull ? `Linked Pull #${issue} Request Body` : `Linked Issue #${issue} Specification`; + + const specOrBody = specAndBodies[key]; + let specOrBodyBlock = createHeader(specHeader, key); + specOrBodyBlock += createSpecOrBody(specOrBody); + specOrBodyBlock += createFooter(specHeader); + + const header = isPull ? `Linked Pull #${issue} Request Conversation` : `Linked Issue #${issue} Conversation`; + const repoString = `${org}/${repo} #${issue}`; + const diff = isPull ? 
await fetchPullRequestDiff(context, org, repo, issue) : null; + + let block = ""; + block += specOrBodyBlock; + block += createHeader(header, repoString); + block += createComment({ issue: parseInt(issue), repo, org, comments }); + block += createFooter(header); + + if (!isPull) { + return block; } - let issueSpecBlock = ""; - issueSpecBlock += createHeader("Project Specification", specAndBodyKeys[0]); - issueSpecBlock += createSpecOrBody(curIssue.specOrBody); - issueSpecBlock += createFooter("Project Specification"); - - let issueCommentBlock = ""; - issueCommentBlock += createHeader("Issue Conversation", convoKeys[0]); - issueCommentBlock += createComment({ issue: parseInt(convoKeys[0].split("/")[2]), repo: convoKeys[0].split("/")[1], org: convoKeys[0].split("/")[0], comments: curIssue.convo }); - issueCommentBlock += createFooter("Issue Conversation"); - - delete convoKeys[0]; - - const linkedContextBlocks = convoKeys.map((key) => { - const comments = streamlined[key]; - const [org, repo, issues, issue] = key.split("/"); - const isPull = linkedPulls[key]; - const specHeader = isPull ? `Linked Pull #${issue} Request Body` : `Linked Issue #${issue} Specification`; - - const specOrBody = specAndBodies[key]; - let specOrBodyBlock = createHeader(specHeader, key); - specOrBodyBlock += createSpecOrBody(specOrBody); - specOrBodyBlock += createFooter(specHeader); - - const header = isPull ? `Linked Pull #${issue} Request Conversation` : `Linked Issue #${issue} Conversation`; - const repoString = `${org}/${repo} #${issue}`; - const diff = isPull ? fetchPullRequestDiff(context, org, repo, issue) : null; + let diffBlock = ""; + diffBlock += createHeader("Linked Pull Request Code Diff", repoString); + diffBlock += diff ? diff : "No diff available"; + diffBlock += createFooter("Linked Pull Request Code Diff"); + return block + diffBlock; + }); - let block = "" - block += specOrBodyBlock; - block += createHeader(header, repoString); - block += createComment({ issue: parseInt(issue), repo, org, comments }); - block += createFooter(header); - - if (!isPull) { - return block; - } - - let diffBlock = createHeader("Linked Pull Request Code Diff", repoString); - diffBlock += diff ? 
diff : "No diff available"; - diffBlock += createFooter("Linked Pull Request Code Diff"); - return block + diffBlock; - }); - - - return issueSpecBlock + issueCommentBlock + linkedContextBlocks.join(""); + return issueSpecBlock + issueCommentBlock + linkedContextBlocks.join(""); } function createHeader(content: string, repoString: string) { - return `=== ${content} === ${repoString} ===\n\n` + return `=== ${content} === ${repoString} ===\n\n`; } function createFooter(content: string) { - return `=== End ${content} ===\n\n` + return `=== End ${content} ===\n\n`; } function createComment(comment: StreamlinedComments) { - let comments = ""; - for (const c of comment.comments) { - comments += `${c.id} ${c.user}: ${c.body}\n`; - } - return comments; + let comments = ""; + for (const c of comment.comments) { + comments += `${c.id} ${c.user}: ${c.body}\n`; + } + return comments; } function createSpecOrBody(specOrBody: string) { - return `${specOrBody}\n` + return `${specOrBody}\n`; } export function createChatHistory(formattedChat: string) { - const chatHistory: ChatCompletionMessageParam[] = []; + const chatHistory: ChatCompletionMessageParam[] = []; - const systemMessage: ChatCompletionMessageParam = { - role: "system", + const systemMessage: ChatCompletionMessageParam = { + role: "system", - content: `Using the provided context, address the question being asked and make sure to provide a clear and concise answer with no follow-up statements. + content: `Using the provided context, address the question being asked and make sure to provide a clear and concise answer with no follow-up statements. The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. - Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.` - }; + Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, + }; - const userMessage: ChatCompletionMessageParam = { - role: "user", - content: formattedChat, - }; + const userMessage: ChatCompletionMessageParam = { + role: "user", + content: formattedChat, + }; - chatHistory.push(systemMessage, userMessage); + chatHistory.push(systemMessage, userMessage); - return chatHistory; -} \ No newline at end of file + return chatHistory; +} diff --git a/src/utils/issue.ts b/src/utils/issue.ts index c690f5b..2a83c6c 100644 --- a/src/utils/issue.ts +++ b/src/utils/issue.ts @@ -2,177 +2,200 @@ import { Context } from "../types"; import { Issue, IssueComments } from "../types/github"; type FetchParams = { - context: Context; - issueNum?: number; - owner?: string; - repo?: string; + context: Context; + issueNum?: number; + owner?: string; + repo?: string; }; /** - * Because in the eyes of the GitHub api Pull Requests are also + * Because in the eyes of the GitHub api Pull Requests are also * issues, we can use the same functions for both. 
*/ export async function fetchIssue(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; - - return await octokit.issues.get({ - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }).then(({ data }) => data as Issue); + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.issues + .get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }) + .then(({ data }) => data as Issue); } export async function fetchIssueComments(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; - - return await octokit.paginate(octokit.issues.listComments, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }).then((comments) => comments as IssueComments); + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit + .paginate(octokit.issues.listComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }) + .then((comments) => comments as IssueComments); } export async function fetchLinkedIssues(params: FetchParams, comments?: IssueComments) { - let issueComments: IssueComments | undefined = comments; - const linkedIssues: { - issueNumber: number; - repo: string; - }[] = []; - - if (!issueComments && !params) { - throw new Error("Either issueComments or params must be provided"); + let issueComments: IssueComments | undefined = comments; + const linkedIssues: { + issueNumber: number; + repo: string; + }[] = []; + + if (!issueComments && !params) { + throw new Error("Either issueComments or params must be provided"); + } + + if (!issueComments) { + issueComments = await fetchIssueComments(params); + } + + const { + context: { + logger, + payload: { + repository: { + owner: { login }, + }, + }, + }, + } = params; + + if (!issueComments) { + logger.info("No comments found on issue"); + return linkedIssues; + } + + for (const comment of issueComments) { + const linkedIssue = idIssueFromComment(login, comment.body); + if (linkedIssue) { + linkedIssues.push(linkedIssue); } + } - if (!issueComments) { - issueComments = await fetchIssueComments(params); - } - - const { context: { logger, payload: { repository: { owner: { login } } } } } = params - - if (!issueComments) { - logger.info("No comments found on issue"); - return linkedIssues - } - - for (const comment of issueComments) { - const linkedIssue = idIssueFromComment(login, comment.body); - if (linkedIssue) { - linkedIssues.push(linkedIssue); - } - } - - return await filterLinkedIssues(params, linkedIssues); + return await filterLinkedIssues(params, linkedIssues); } -async function filterLinkedIssues(params: FetchParams, linkedIssues: { issueNumber: number; repo: string; }[]) { - const { context: { logger, payload: { repository: { owner: { login } } } } } = params - - const contextIssues: { - issueNumber: number; - repo: string; - }[] = []; - - for (const issue of linkedIssues) { - if (issue && issue.issueNumber && issue.repo) { - if (await isRepoFromSameOrg(params.context, issue.repo, login)) { - contextIssues.push({ - issueNumber: issue.issueNumber, - repo: 
issue.repo - }); - } else { - logger.info(`Ignoring linked issue ${issue.issueNumber} from ${issue.repo} as it is not from the same org`); - } - } +async function filterLinkedIssues(params: FetchParams, linkedIssues: { issueNumber: number; repo: string }[]) { + const { + context: { + logger, + payload: { + repository: { + owner: { login }, + }, + }, + }, + } = params; + + const contextIssues: { + issueNumber: number; + repo: string; + }[] = []; + + for (const issue of linkedIssues) { + if (issue && issue.issueNumber && issue.repo) { + if (await isRepoFromSameOrg(params.context, issue.repo, login)) { + contextIssues.push({ + issueNumber: issue.issueNumber, + repo: issue.repo, + }); + } else { + logger.info(`Ignoring linked issue ${issue.issueNumber} from ${issue.repo} as it is not from the same org`); + } } + } - return contextIssues; + return contextIssues; } export async function getLinkedIssueContextFromComments(context: Context, issueComments: IssueComments) { - // find any linked issues in comments by parsing the comments and enforcing that the - // linked issue is from the same org that the current issue is from - const linkedIssues = await fetchLinkedIssues({ context }, issueComments); - - // the conversational history of the linked issues - const linkedIssueComments: IssueComments = []; - - // we are only going one level deep with the linked issue context fetching - for (const issue of linkedIssues) { - console.log(`Fetching linked issue ${issue.issueNumber}`) - const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }) - linkedIssueComments.push(...fetched); - } + // find any linked issues in comments by parsing the comments and enforcing that the + // linked issue is from the same org that the current issue is from + const linkedIssues = await fetchLinkedIssues({ context }, issueComments); - return { linkedIssues, linkedIssueComments }; -} + // the conversational history of the linked issues + const linkedIssueComments: IssueComments = []; + // we are only going one level deep with the linked issue context fetching + for (const issue of linkedIssues) { + console.log(`Fetching linked issue ${issue.issueNumber}`); + const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }); + linkedIssueComments.push(...fetched); + } -export function idIssueFromComment(owner?: string, comment?: string | null) { - if (!comment) { - return null; - } - if (!owner) { - throw new Error("Owner must be provided when parsing linked issues"); - } - // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag - const urlMatch = comment.match(/https:\/\/github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); - - /** - * I think we should restrict including any linked context which is not of the same org. - * - * In most cases this will be the expected behaviour, I remember a scenario where - * I linked to an issue in a 3rd party org, for extra reviewer context but I also include the - * TL;DR which is always the case. We wouldn't want that full 3rd party PR review or issue to be - * included in the context. - */ - - const linkedIssue: { - issueNumber: number; - repo: string; - } = { - issueNumber: 0, - repo: "" - }; - - /** - * If following the rule that only issues from the same org should be included - * then we need to be sure that this format of linked issue is from the same org. 
- */ - - if (urlMatch && urlMatch[1] === owner) { - linkedIssue.issueNumber = parseInt(urlMatch[4]); - linkedIssue.repo = urlMatch[2]; - } + return { linkedIssues, linkedIssueComments }; +} - return linkedIssue; +export function idIssueFromComment(owner?: string, comment?: string | null) { + if (!comment) { + return null; + } + if (!owner) { + throw new Error("Owner must be provided when parsing linked issues"); + } + // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag + const urlMatch = comment.match(/https:\/\/github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); + + /** + * I think we should restrict including any linked context which is not of the same org. + * + * In most cases this will be the expected behaviour, I remember a scenario where + * I linked to an issue in a 3rd party org, for extra reviewer context but I also include the + * TL;DR which is always the case. We wouldn't want that full 3rd party PR review or issue to be + * included in the context. + */ + + const linkedIssue: { + issueNumber: number; + repo: string; + } = { + issueNumber: 0, + repo: "", + }; + + /** + * If following the rule that only issues from the same org should be included + * then we need to be sure that this format of linked issue is from the same org. + */ + + if (urlMatch && urlMatch[1] === owner) { + linkedIssue.issueNumber = parseInt(urlMatch[4]); + linkedIssue.repo = urlMatch[2]; + } + + return linkedIssue; } async function isRepoFromSameOrg(context: Context, repo: string, owner: string) { - const { octokit } = context; - const { data } = await octokit.repos.get({ - owner, - repo - }); + const { octokit } = context; + const { data } = await octokit.repos.get({ + owner, + repo, + }); - return data.owner.login === owner; + return data.owner.login === owner; } - export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: string) { - const { logger, octokit } = context; - - try { - const diff = await octokit.pulls.get({ - owner: org, - repo, - pull_number: parseInt(issue) - }); - return diff.data; - } catch (error) { - logger.error(`Error fetching pull request diff: ${error}`); - return null; - } -} \ No newline at end of file + const { logger, octokit } = context; + + try { + const diff = await octokit.pulls.get({ + owner: org, + repo, + pull_number: parseInt(issue), + mediaType: { + format: "diff", + }, + }); + return diff.data as unknown as string; + } catch (error) { + logger.error(`Error fetching pull request diff: ${error}`); + return null; + } +} From 1d51869054031f014139214e0a59479853ff02c6 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 03:39:43 +0100 Subject: [PATCH 12/72] feat: ubiquibot-logger --- package.json | 1 + src/handlers/add-comment.ts | 2 +- src/handlers/ask-gpt.ts | 1 - src/plugin.ts | 19 +--------- src/types/context.ts | 9 +---- src/utils/issue.ts | 1 - yarn.lock | 76 +++---------------------------------- 7 files changed, 12 insertions(+), 97 deletions(-) diff --git a/package.json b/package.json index be1396f..1f50699 100644 --- a/package.json +++ b/package.json @@ -32,6 +32,7 @@ "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", + "@ubiquity-dao/ubiquibot-logger": "^1.3.0", "dotenv": "16.4.5", "openai": "^4.52.7", "typebox-validators": "0.3.5" diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index 2417212..d8410de 100644 --- a/src/handlers/add-comment.ts +++ 
b/src/handlers/add-comment.ts @@ -23,6 +23,6 @@ export async function addCommentToIssue(context: Context, message: string, diff body: message, }); } catch (e: unknown) { - context.logger.fatal("Adding a comment failed!", e); + context.logger.error("Adding a comment failed!", { e }); } } diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index d42cf8f..0d6a234 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -33,7 +33,6 @@ export async function askQuestion(context: Context, question: string) { // we are only going one level deep with the linked issue context fetching for (const issue of linkedIssues) { - console.log(`Fetching linked issue ${issue.issueNumber}`); const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }); linkedIssueComments.push(...fetched); } diff --git a/src/plugin.ts b/src/plugin.ts index 80f8359..50dc233 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -3,6 +3,7 @@ import { Env, PluginInputs, SupportedEventsU } from "./types"; import { Context } from "./types"; import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; +import { Logs } from "@ubiquity-dao/ubiquibot-logger"; /** * How a worker executes the plugin. @@ -16,23 +17,7 @@ export async function plugin(inputs: PluginInputs, env: Env) { config: inputs.settings, octokit, env, - logger: { - debug(message: unknown, ...optionalParams: unknown[]) { - console.debug(message, ...optionalParams); - }, - info(message: unknown, ...optionalParams: unknown[]) { - console.log(message, ...optionalParams); - }, - warn(message: unknown, ...optionalParams: unknown[]) { - console.warn(message, ...optionalParams); - }, - error(message: unknown, ...optionalParams: unknown[]) { - console.error(message, ...optionalParams); - }, - fatal(message: unknown, ...optionalParams: unknown[]) { - console.error(message, ...optionalParams); - }, - }, + logger: new Logs("info"), adapters: {} as never, }; const { diff --git a/src/types/context.ts b/src/types/context.ts index 0b696a0..437c713 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -2,6 +2,7 @@ import { Octokit } from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; +import { Logs } from "@ubiquity-dao/ubiquibot-logger" export type SupportedEventsU = "issue_comment.created"; @@ -16,11 +17,5 @@ export interface Context void; - error: (message: unknown, ...optionalParams: unknown[]) => void; - warn: (message: unknown, ...optionalParams: unknown[]) => void; - info: (message: unknown, ...optionalParams: unknown[]) => void; - debug: (message: unknown, ...optionalParams: unknown[]) => void; - }; + logger: Logs; } diff --git a/src/utils/issue.ts b/src/utils/issue.ts index 2a83c6c..7e8075b 100644 --- a/src/utils/issue.ts +++ b/src/utils/issue.ts @@ -123,7 +123,6 @@ export async function getLinkedIssueContextFromComments(context: Context, issueC // we are only going one level deep with the linked issue context fetching for (const issue of linkedIssues) { - console.log(`Fetching linked issue ${issue.issueNumber}`); const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }); linkedIssueComments.push(...fetched); } diff --git a/yarn.lock b/yarn.lock index fedf3d4..b6d449b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1801,63 +1801,6 @@ ignore "^5.1.8" p-map 
"^4.0.0" -"@supabase/auth-js@2.64.2": - version "2.64.2" - resolved "https://registry.yarnpkg.com/@supabase/auth-js/-/auth-js-2.64.2.tgz#fe6828ed2c9844bf2e71b27f88ddfb635f24d1c1" - integrity sha512-s+lkHEdGiczDrzXJ1YWt2y3bxRi+qIUnXcgkpLSrId7yjBeaXBFygNjTaoZLG02KNcYwbuZ9qkEIqmj2hF7svw== - dependencies: - "@supabase/node-fetch" "^2.6.14" - -"@supabase/functions-js@2.4.1": - version "2.4.1" - resolved "https://registry.yarnpkg.com/@supabase/functions-js/-/functions-js-2.4.1.tgz#373e75f8d3453bacd71fb64f88d7a341d7b53ad7" - integrity sha512-8sZ2ibwHlf+WkHDUZJUXqqmPvWQ3UHN0W30behOJngVh/qHHekhJLCFbh0AjkE9/FqqXtf9eoVvmYgfCLk5tNA== - dependencies: - "@supabase/node-fetch" "^2.6.14" - -"@supabase/node-fetch@2.6.15", "@supabase/node-fetch@^2.6.14": - version "2.6.15" - resolved "https://registry.yarnpkg.com/@supabase/node-fetch/-/node-fetch-2.6.15.tgz#731271430e276983191930816303c44159e7226c" - integrity sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ== - dependencies: - whatwg-url "^5.0.0" - -"@supabase/postgrest-js@1.15.5": - version "1.15.5" - resolved "https://registry.yarnpkg.com/@supabase/postgrest-js/-/postgrest-js-1.15.5.tgz#7fa7744cb0991328bb1a7757861e435a5477f358" - integrity sha512-YR4TiitTE2hizT7mB99Cl3V9i00RAY5sUxS2/NuWWzkreM7OeYlP2OqnqVwwb4z6ILn+j8x9e/igJDepFhjswQ== - dependencies: - "@supabase/node-fetch" "^2.6.14" - -"@supabase/realtime-js@2.9.5": - version "2.9.5" - resolved "https://registry.yarnpkg.com/@supabase/realtime-js/-/realtime-js-2.9.5.tgz#22b7de952a7f37868ffc25d32d19f03f27bfcb40" - integrity sha512-TEHlGwNGGmKPdeMtca1lFTYCedrhTAv3nZVoSjrKQ+wkMmaERuCe57zkC5KSWFzLYkb5FVHW8Hrr+PX1DDwplQ== - dependencies: - "@supabase/node-fetch" "^2.6.14" - "@types/phoenix" "^1.5.4" - "@types/ws" "^8.5.10" - ws "^8.14.2" - -"@supabase/storage-js@2.6.0": - version "2.6.0" - resolved "https://registry.yarnpkg.com/@supabase/storage-js/-/storage-js-2.6.0.tgz#0fa5e04db760ed7f78e4394844a6d409e537adc5" - integrity sha512-REAxr7myf+3utMkI2oOmZ6sdplMZZ71/2NEIEMBZHL9Fkmm3/JnaOZVSRqvG4LStYj2v5WhCruCzuMn6oD/Drw== - dependencies: - "@supabase/node-fetch" "^2.6.14" - -"@supabase/supabase-js@2.43.5": - version "2.43.5" - resolved "https://registry.yarnpkg.com/@supabase/supabase-js/-/supabase-js-2.43.5.tgz#e4d5f9e5e21ef4226e0cb013c7e51fb3c5262581" - integrity sha512-Y4GukjZWW6ouohMaPlYz8tSz9ykf9jY7w9/RhqKuScmla3Xiklce8eLr8TYAtA+oQYCWxo3RgS3B6O4rd/72FA== - dependencies: - "@supabase/auth-js" "2.64.2" - "@supabase/functions-js" "2.4.1" - "@supabase/node-fetch" "2.6.15" - "@supabase/postgrest-js" "1.15.5" - "@supabase/realtime-js" "2.9.5" - "@supabase/storage-js" "2.6.0" - "@types/babel__core@^7.1.14": version "7.20.5" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017" @@ -1982,11 +1925,6 @@ dependencies: undici-types "~5.26.4" -"@types/phoenix@^1.5.4": - version "1.6.4" - resolved "https://registry.yarnpkg.com/@types/phoenix/-/phoenix-1.6.4.tgz#cceac93a827555473ad38057d1df7d06eef1ed71" - integrity sha512-B34A7uot1Cv0XtaHRYDATltAdKx0BvVKNgYNqE4WjtPUa4VQJM7kxeXcVKaH+KS+kCmZ+6w+QaUdcljiheiBJA== - "@types/pluralize@^0.0.29": version "0.0.29" resolved "https://registry.yarnpkg.com/@types/pluralize/-/pluralize-0.0.29.tgz#6ffa33ed1fc8813c469b859681d09707eb40d03c" @@ -2012,13 +1950,6 @@ resolved "https://registry.yarnpkg.com/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz#18b97a972f94f60a679fd5c796d96421b9abb9fd" integrity 
sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g== -"@types/ws@^8.5.10": - version "8.5.10" - resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.10.tgz#4acfb517970853fa6574a3a6886791d04a396787" - integrity sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A== - dependencies: - "@types/node" "*" - "@types/yargs-parser@*": version "21.0.3" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" @@ -2112,6 +2043,11 @@ "@typescript-eslint/types" "7.13.1" eslint-visitor-keys "^3.4.3" +"@ubiquity-dao/ubiquibot-logger@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@ubiquity-dao/ubiquibot-logger/-/ubiquibot-logger-1.3.0.tgz#b07364658be95b3be3876305c66b2adc906e9590" + integrity sha512-ifkd7fB2OMTSt3OL9L14bCIvCMXV+IHFdJYU5S8FUzE2U88b4xKxuEAYDFX+DX3wwDEswFAVUwx5aP3QcMIRWA== + JSONStream@^1.3.5: version "1.3.5" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" @@ -6612,7 +6548,7 @@ write-file-atomic@^4.0.2: imurmurhash "^0.1.4" signal-exit "^3.0.7" -ws@^8.11.0, ws@^8.14.2: +ws@^8.11.0: version "8.17.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.0.tgz#d145d18eca2ed25aaf791a183903f7be5e295fea" integrity sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow== From 6b0333b1bfb8d9d1c41eae8ad46e3b219447da85 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 16:34:07 +0100 Subject: [PATCH 13/72] fix: ignore all bot comments --- src/handlers/ask-gpt.ts | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 0d6a234..a63a538 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -49,6 +49,8 @@ export async function askQuestion(context: Context, question: string) { ); const formattedChat = formatChatHistory(context, streamlinedComments, specAndBodies, linkedPulls); + logger.info(`Formatted chat history`, { formattedChat }); + return await askGpt(context, formattedChat); } @@ -166,17 +168,9 @@ function streamlineComments(comments: IssueComments) { const streamlined: Record = {}; for (const comment of comments) { - const user = comment.user?.login; - if ( - user === "ubiquibot" || - user === "ubiquibot[bot]" || - user === "ubiquibot-v2-testing" || - user === "ubiquibot-dev[bot]" || - user === "ubqbot[bot]" || // TODO: remove this - user === "github-actions[bot]" - ) { - const isPreviousAnswer = comment.body?.includes(""); - if (!isPreviousAnswer) continue; + const user = comment.user; + if (user && user.type === "Bot") { + continue; } const body = comment.body; @@ -188,7 +182,7 @@ function streamlineComments(comments: IssueComments) { if (user && body) { streamlined[key].push({ - user, + user: user.login, body, id: comment.id, }); From a72c97b001808f1f963b7841e050f1c5426fc825 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 16:58:42 +0100 Subject: [PATCH 14/72] chore: use string arrays, remove never configs --- .github/workflows/compute.yml | 6 +-- src/plugin.ts | 4 +- src/types/context.ts | 3 -- src/utils/format-chat-history.ts | 77 +++++++++++++++++--------------- src/worker.ts | 2 +- 5 files changed, 47 insertions(+), 45 deletions(-) diff --git a/.github/workflows/compute.yml 
b/.github/workflows/compute.yml index 9a34dce..09e35d5 100644 --- a/.github/workflows/compute.yml +++ b/.github/workflows/compute.yml @@ -1,4 +1,4 @@ -name: "ubiquibot-gpt-command" +name: "command-gpt" on: workflow_dispatch: @@ -18,7 +18,7 @@ on: jobs: compute: - name: "gpt-command" + name: "command-gpt" runs-on: ubuntu-latest permissions: write-all @@ -35,4 +35,4 @@ jobs: - name: execute directive run: npx tsx ./src/main.ts - id: gpt-command + id: command-gpt diff --git a/src/plugin.ts b/src/plugin.ts index 50dc233..83c5473 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -8,7 +8,7 @@ import { Logs } from "@ubiquity-dao/ubiquibot-logger"; /** * How a worker executes the plugin. */ -export async function plugin(inputs: PluginInputs, env: Env) { +export async function plugin(inputs: PluginInputs) { const octokit = new Octokit({ auth: inputs.authToken }); const context: Context = { @@ -16,9 +16,7 @@ export async function plugin(inputs: PluginInputs, env: Env) { payload: inputs.eventPayload, config: inputs.settings, octokit, - env, logger: new Logs("info"), - adapters: {} as never, }; const { logger, diff --git a/src/types/context.ts b/src/types/context.ts index 437c713..59d4fa7 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -1,6 +1,5 @@ import { Octokit } from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; -import { Env } from "./env"; import { PluginSettings } from "./plugin-inputs"; import { Logs } from "@ubiquity-dao/ubiquibot-logger" @@ -14,8 +13,6 @@ export interface Context; - adapters: never; config: PluginSettings; - env: Env; logger: Logs; } diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index 40ffa90..e895c48 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -17,20 +17,22 @@ export function formatChatHistory( specOrBody: specAndBodies[specAndBodyKeys[0]], }; - let issueSpecBlock = ""; - issueSpecBlock += createHeader("Project Specification", specAndBodyKeys[0]); - issueSpecBlock += createSpecOrBody(curIssue.specOrBody); - issueSpecBlock += createFooter("Project Specification"); - - let issueCommentBlock = ""; - issueCommentBlock += createHeader("Issue Conversation", convoKeys[0]); - issueCommentBlock += createComment({ - issue: parseInt(convoKeys[0].split("/")[2]), - repo: convoKeys[0].split("/")[1], - org: convoKeys[0].split("/")[0], - comments: curIssue.convo, - }); - issueCommentBlock += createFooter("Issue Conversation"); + const issueSpecBlock: string[] = [ + createHeader("Project Specification", specAndBodyKeys[0]), + createSpecOrBody(curIssue.specOrBody), + createFooter("Project Specification") + ] + + const issueCommentBlock: string[] = [ + createHeader("Issue Conversation", convoKeys[0]), + createComment({ + issue: parseInt(convoKeys[0].split("/")[2]), + repo: convoKeys[0].split("/")[1], + org: convoKeys[0].split("/")[0], + comments: curIssue.convo, + }), + createFooter("Issue Conversation") + ]; delete convoKeys[0]; @@ -41,32 +43,37 @@ export function formatChatHistory( const specHeader = isPull ? 
`Linked Pull #${issue} Request Body` : `Linked Issue #${issue} Specification`; const specOrBody = specAndBodies[key]; - let specOrBodyBlock = createHeader(specHeader, key); - specOrBodyBlock += createSpecOrBody(specOrBody); - specOrBodyBlock += createFooter(specHeader); + const specOrBodyBlock = [ + createHeader(specHeader, key), + createSpecOrBody(specOrBody), + createFooter(specHeader) + ] const header = isPull ? `Linked Pull #${issue} Request Conversation` : `Linked Issue #${issue} Conversation`; const repoString = `${org}/${repo} #${issue}`; const diff = isPull ? await fetchPullRequestDiff(context, org, repo, issue) : null; - let block = ""; - block += specOrBodyBlock; - block += createHeader(header, repoString); - block += createComment({ issue: parseInt(issue), repo, org, comments }); - block += createFooter(header); + const block = [ + specOrBodyBlock.join(""), + createHeader(header, repoString), + createComment({ issue: parseInt(issue), repo, org, comments }), + createFooter(header) + ] if (!isPull) { - return block; + return block.join(""); } - let diffBlock = ""; - diffBlock += createHeader("Linked Pull Request Code Diff", repoString); - diffBlock += diff ? diff : "No diff available"; - diffBlock += createFooter("Linked Pull Request Code Diff"); - return block + diffBlock; + const diffBlock = [ + createHeader("Linked Pull Request Code Diff", repoString), + diff ? diff : "No diff available", + createFooter("Linked Pull Request Code Diff") + ] + + return block.join("") + diffBlock.join(""); }); - return issueSpecBlock + issueCommentBlock + linkedContextBlocks.join(""); + return issueSpecBlock.join("") + issueCommentBlock.join("") + linkedContextBlocks.join(""); } function createHeader(content: string, repoString: string) { @@ -78,11 +85,11 @@ function createFooter(content: string) { } function createComment(comment: StreamlinedComments) { - let comments = ""; + const comments = [] for (const c of comment.comments) { - comments += `${c.id} ${c.user}: ${c.body}\n`; + comments.push(`${c.id} ${c.user}: ${c.body}\n`); } - return comments; + return comments.join(""); } function createSpecOrBody(specOrBody: string) { @@ -94,10 +101,10 @@ export function createChatHistory(formattedChat: string) { const systemMessage: ChatCompletionMessageParam = { role: "system", - - content: `Using the provided context, address the question being asked and make sure to provide a clear and concise answer with no follow-up statements. + content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. + Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. 
- Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, + Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`.trim(), }; const userMessage: ChatCompletionMessageParam = { diff --git a/src/worker.ts b/src/worker.ts index 3048b5d..0f76fd1 100644 --- a/src/worker.ts +++ b/src/worker.ts @@ -46,7 +46,7 @@ export default { } webhookPayload.settings = settings; - await plugin(webhookPayload, env); + await plugin(webhookPayload); return new Response(JSON.stringify("OK"), { status: 200, headers: { "content-type": "application/json" } }); } catch (error) { return handleUncaughtError(error); From c7b6605ba25fa0438a473905717b2ea314e67856 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sat, 13 Jul 2024 17:26:47 +0100 Subject: [PATCH 15/72] feat: deeper linked context fetching --- src/handlers/ask-gpt.ts | 3 +- src/types/plugin-inputs.ts | 3 +- src/utils/issue.ts | 84 +++++++++++++++++++++----------------- 3 files changed, 50 insertions(+), 40 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index a63a538..c4962c1 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -10,6 +10,7 @@ export async function askQuestion(context: Context, question: string) { const { logger, payload: { issue: currentIssue }, + config: { linkedIssueFetchDepth }, } = context; if (!question) { @@ -28,7 +29,7 @@ export async function askQuestion(context: Context, question: string) { ); const issueComments = await fetchIssueComments({ context }); - const linkedIssueContext = await getLinkedIssueContextFromComments(context, issueComments); + const linkedIssueContext = await getLinkedIssueContextFromComments(context, issueComments, linkedIssueFetchDepth); const { linkedIssues, linkedIssueComments } = linkedIssueContext; // we are only going one level deep with the linked issue context fetching diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 166b623..ca3e163 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -21,7 +21,8 @@ export interface PluginInputs Date: Sun, 14 Jul 2024 15:49:28 +0100 Subject: [PATCH 16/72] chore: types and eslint ignore .wrangler --- eslint.config.mjs | 2 +- src/main.ts | 5 ++--- src/plugin.ts | 2 +- src/types/context.ts | 2 +- src/types/github.ts | 19 +++++++++++++++++++ src/types/gpt.ts | 10 +++++++++- 6 files changed, 33 insertions(+), 7 deletions(-) diff --git a/eslint.config.mjs b/eslint.config.mjs index e53d263..c714515 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -9,7 +9,7 @@ export default tsEslint.config({ "@typescript-eslint": tsEslint.plugin, "check-file": checkFile, }, - ignores: [".github/knip.ts"], + ignores: [".github/knip.ts", ".wrangler/**/*.ts", ".wrangler/**/*.js"], extends: [eslint.configs.recommended, ...tsEslint.configs.recommended, sonarjs.configs.recommended], languageOptions: { parser: tsEslint.parser, diff --git a/src/main.ts b/src/main.ts index b46765e..15db8fb 100644 --- a/src/main.ts +++ b/src/main.ts @@ -2,7 +2,7 @@ import * as core from "@actions/core"; import * as github from "@actions/github"; import { Octokit } from "@octokit/rest"; import { Value } from "@sinclair/typebox/value"; -import { envSchema, pluginSettingsSchema, PluginInputs, pluginSettingsValidator } from "./types"; +import { pluginSettingsSchema, PluginInputs, pluginSettingsValidator } from "./types"; import { 
plugin } from "./plugin"; /** @@ -11,7 +11,6 @@ import { plugin } from "./plugin"; export async function run() { const payload = github.context.payload.inputs; - const env = Value.Decode(envSchema, payload.env); const settings = Value.Decode(pluginSettingsSchema, Value.Default(pluginSettingsSchema, JSON.parse(payload.settings))); if (!pluginSettingsValidator.test(settings)) { @@ -27,7 +26,7 @@ export async function run() { ref: payload.ref, }; - await plugin(inputs, env); + await plugin(inputs); return returnDataToKernel(inputs.authToken, inputs.stateId, {}); } diff --git a/src/plugin.ts b/src/plugin.ts index 83c5473..fbb66f6 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,5 +1,5 @@ import { Octokit } from "@octokit/rest"; -import { Env, PluginInputs, SupportedEventsU } from "./types"; +import { PluginInputs, SupportedEventsU } from "./types"; import { Context } from "./types"; import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; diff --git a/src/types/context.ts b/src/types/context.ts index 59d4fa7..5fd5504 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -1,7 +1,7 @@ import { Octokit } from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; import { PluginSettings } from "./plugin-inputs"; -import { Logs } from "@ubiquity-dao/ubiquibot-logger" +import { Logs } from "@ubiquity-dao/ubiquibot-logger"; export type SupportedEventsU = "issue_comment.created"; diff --git a/src/types/github.ts b/src/types/github.ts index 908aad0..2b7d857 100644 --- a/src/types/github.ts +++ b/src/types/github.ts @@ -1,4 +1,23 @@ import { RestEndpointMethodTypes } from "@octokit/rest"; +import { Context } from "./context"; export type Issue = RestEndpointMethodTypes["issues"]["get"]["response"]["data"]; export type IssueComments = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"]; +export type ReviewComments = RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"]; +export type IssueComment = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"][0]; +export type ReviewComment = RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"][0]; + +export type FetchParams = { + context: Context; + issueNum?: number; + owner?: string; + repo?: string; +}; +export type LinkedIssues = { + issueNumber: number; + repo: string; + owner: string; + url: string; + comments?: IssueComments | ReviewComments; + body?: string; +}; diff --git a/src/types/gpt.ts b/src/types/gpt.ts index c84e649..523f872 100644 --- a/src/types/gpt.ts +++ b/src/types/gpt.ts @@ -1,7 +1,15 @@ export type StreamlinedComment = { + id: number; user?: string; body?: string; - id: number; + org: string; + repo: string; + isPull: boolean; + issueUrl: string; + specOrBody?: { + html: string; + text: string; + }; }; export type StreamlinedComments = { From 154a9b226ecb99d1efe3e27db82a7689e4a3b0ff Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sun, 14 Jul 2024 15:51:13 +0100 Subject: [PATCH 17/72] chore: simplify main handler --- src/handlers/ask-gpt.ts | 181 ++-------------------------------------- 1 file changed, 5 insertions(+), 176 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index c4962c1..fd68e8d 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,17 +1,12 @@ import OpenAI from "openai"; import { Context } from "../types"; -import { 
fetchIssue, fetchIssueComments, getLinkedIssueContextFromComments, idIssueFromComment } from "../utils/issue"; -import { IssueComments } from "../types/github"; -import { StreamlinedComment } from "../types/gpt"; +import { recursivelyFetchLinkedIssues } from "../utils/issue"; + import { createChatHistory, formatChatHistory } from "../utils/format-chat-history"; import { addCommentToIssue } from "./add-comment"; export async function askQuestion(context: Context, question: string) { - const { - logger, - payload: { issue: currentIssue }, - config: { linkedIssueFetchDepth }, - } = context; + const { logger } = context; if (!question) { logger.error(`No question provided`); @@ -19,179 +14,13 @@ export async function askQuestion(context: Context, question: string) { return; } - const { body: issueSpecOrPullBody, repository_url } = currentIssue; - const org = repository_url.split("/")[4]; - - const { specReferencedIssueBody, specReferencedIssueKey, streamlinedSpecReferencedIssueComments } = await getSpecReferencedContext( - context, - org, - issueSpecOrPullBody - ); - - const issueComments = await fetchIssueComments({ context }); - const linkedIssueContext = await getLinkedIssueContextFromComments(context, issueComments, linkedIssueFetchDepth); - const { linkedIssues, linkedIssueComments } = linkedIssueContext; - - // we are only going one level deep with the linked issue context fetching - for (const issue of linkedIssues) { - const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }); - linkedIssueComments.push(...fetched); - } - - const streamlinedComments = await getAllStreamlinedComments(issueComments, streamlinedSpecReferencedIssueComments, linkedIssueComments); - const { linkedPulls, specAndBodies } = await getSpecBodiesAndLinkedPulls( - context, - repository_url, - currentIssue.number, - issueSpecOrPullBody, - specReferencedIssueBody, - specReferencedIssueKey, - linkedIssues - ); - const formattedChat = formatChatHistory(context, streamlinedComments, specAndBodies, linkedPulls); + const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); - logger.info(`Formatted chat history`, { formattedChat }); + const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); return await askGpt(context, formattedChat); } -async function getAllStreamlinedComments( - issueComments: IssueComments, - streamlinedSpecReferencedIssueComments: Record | undefined, - linkedIssueComments: IssueComments -) { - const streamlinedComments = streamlineComments(issueComments) ?? 
{}; - - if (streamlinedSpecReferencedIssueComments && Object.keys(streamlinedSpecReferencedIssueComments).length > 0) { - for (const [key, value] of Object.entries(streamlinedSpecReferencedIssueComments)) { - if (!streamlinedComments[key]) { - streamlinedComments[key] = value; - continue; - } - - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; - } - } - - if (linkedIssueComments.length > 0) { - const linkedStreamlinedComments = streamlineComments(linkedIssueComments); - - if (linkedStreamlinedComments) { - for (const [key, value] of Object.entries(linkedStreamlinedComments)) { - if (!streamlinedComments[key]) { - streamlinedComments[key] = value; - continue; - } - - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; - } - } - } - - return streamlinedComments; -} - -async function getSpecBodiesAndLinkedPulls( - context: Context, - currentIssueUrl: string, - currentIssueNumber: number, - issueSpecOrPullBody: string | null, - specReferencedIssueBody: string | null | undefined, - specReferencedIssueKey: string | null | undefined, - linkedIssues: { issueNumber: number; repo: string }[] -) { - const linkedPulls: Record = {}; - const currentIssueKey = createKey(currentIssueUrl, currentIssueNumber); - // collect specifically all of the spec and PR bodies - const specAndBodies: Record = {}; - specAndBodies[currentIssueKey] = issueSpecOrPullBody || ""; - specAndBodies[specReferencedIssueKey as string] = specReferencedIssueBody || ""; - - for (const linkedIssue of linkedIssues) { - const issue = await fetchIssue({ context, issueNum: linkedIssue.issueNumber, repo: linkedIssue.repo }); - const { body, repository_url, pull_request } = issue; - const linkedIssueKey = createKey(repository_url, linkedIssue.issueNumber); - specAndBodies[linkedIssueKey] = body || ""; - - if (pull_request) { - linkedPulls[linkedIssueKey] = true; - } - } - - return { specAndBodies, linkedPulls }; -} - -async function getSpecReferencedContext(context: Context, org: string, issueSpecOrPullBody: string | null) { - // fetch the spec referenced issue if it exists - const specReferencedIssueId = idIssueFromComment(org, issueSpecOrPullBody); - let specReferencedIssue, - specReferencedIssueBody, - specReferencedIssueRepoUrl, - specReferencedIssueComments, - specReferencedIssueKey, - streamlinedSpecReferencedIssueComments; - - if (specReferencedIssueId) { - specReferencedIssue = await fetchIssue({ context, issueNum: specReferencedIssueId.issueNumber }); - specReferencedIssueBody = specReferencedIssue.body; - specReferencedIssueRepoUrl = specReferencedIssue.repository_url; - specReferencedIssueComments = await fetchIssueComments({ context, issueNum: specReferencedIssueId.issueNumber, repo: specReferencedIssueId.repo }); - specReferencedIssueKey = createKey(specReferencedIssueRepoUrl, specReferencedIssueId?.issueNumber); - streamlinedSpecReferencedIssueComments = streamlineComments(specReferencedIssueComments) ?? 
{}; - } - - return { - specReferencedIssue, - specReferencedIssueBody, - specReferencedIssueRepoUrl, - specReferencedIssueComments, - specReferencedIssueKey, - streamlinedSpecReferencedIssueComments, - }; -} - -function createKey(issueUrl: string, issue?: number) { - const splitUrl = issueUrl?.split("/"); - const issueNumber = issue || parseInt(splitUrl?.pop() || ""); - const issueRepo = splitUrl?.slice(-2).join("/"); - const issueOrg = splitUrl?.slice(-3, -2).join("/"); - - if (issueOrg.startsWith("repos")) { - return `${issueRepo}/issues/${issueNumber}`; - } - - return `${issueOrg}/${issueRepo}/${issueNumber}`; -} - -function streamlineComments(comments: IssueComments) { - const streamlined: Record = {}; - - for (const comment of comments) { - const user = comment.user; - if (user && user.type === "Bot") { - continue; - } - - const body = comment.body; - const key = createKey(comment.issue_url); - - if (!streamlined[key]) { - streamlined[key] = []; - } - - if (user && body) { - streamlined[key].push({ - user: user.login, - body, - id: comment.id, - }); - } - } - return streamlined; -} - export async function askGpt(context: Context, formattedChat: string) { const { logger, From 2b86ab2fa7720fbc9b76ddd2c921845e98b0295e Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sun, 14 Jul 2024 15:52:45 +0100 Subject: [PATCH 18/72] feat: comments handler --- src/handlers/comments.ts | 76 ++++++++++++++++++++++++++++++++++++++++ src/types/gpt.ts | 1 - 2 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 src/handlers/comments.ts diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts new file mode 100644 index 0000000..5c33c7b --- /dev/null +++ b/src/handlers/comments.ts @@ -0,0 +1,76 @@ +import { IssueComments, LinkedIssues, ReviewComments } from "../types/github"; +import { StreamlinedComment } from "../types/gpt"; + +export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { + const streamlinedComments: Record = {}; + + for (const issue of linkedIssues) { + const linkedIssueComments = issue.comments; + if (!linkedIssueComments) continue; + + if (linkedIssueComments.length > 0) { + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); + + if (linkedStreamlinedComments) { + for (const [key, value] of Object.entries(linkedStreamlinedComments)) { + if (!streamlinedComments[key]) { + streamlinedComments[key] = value; + continue; + } + + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; + } + } + } + } + + return streamlinedComments; +} + +export function createKey(issueUrl: string, issue?: number) { + if (!issueUrl) throw new Error("issueUrl is required"); + if (issueUrl.includes("undefined")) { + throw new Error("issueUrl is not valid"); + } + const [, , , , issueOrg, issueRepo, , issueNumber] = issueUrl.split("/"); + + return `${issueOrg}/${issueRepo}/${issueNumber || issue}`; +} + +export function streamlineComments(comments: IssueComments | ReviewComments) { + const streamlined: Record = {}; + + for (const comment of comments) { + const user = comment.user; + if (user && user.type === "Bot") { + continue; + } + + let url = ""; + if ("issue_url" in comment) { + url = comment.issue_url; + } else if ("pull_request_url" in comment) { + url = comment.pull_request_url; + } + + const body = comment.body; + const key = createKey(url); + + if (!streamlined[key]) { + streamlined[key] = []; + } + + if (user && body) { + streamlined[key].push({ + user: 
user.login, + body, + id: comment.id, + org: url.split("/")[4], + repo: url.split("/")[5], + issueUrl: url, + }); + } + } + return streamlined; +} diff --git a/src/types/gpt.ts b/src/types/gpt.ts index 523f872..0b2a5ce 100644 --- a/src/types/gpt.ts +++ b/src/types/gpt.ts @@ -4,7 +4,6 @@ export type StreamlinedComment = { body?: string; org: string; repo: string; - isPull: boolean; issueUrl: string; specOrBody?: { html: string; From 7e6582b069523bd21bee61c510bdea3eefdd1042 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sun, 14 Jul 2024 15:57:54 +0100 Subject: [PATCH 19/72] chore: improved context handling --- src/utils/issue.ts | 367 +++++++++++++++++++++++++++------------------ 1 file changed, 219 insertions(+), 148 deletions(-) diff --git a/src/utils/issue.ts b/src/utils/issue.ts index af26be0..f71a981 100644 --- a/src/utils/issue.ts +++ b/src/utils/issue.ts @@ -1,208 +1,279 @@ +import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; -import { Issue, IssueComments } from "../types/github"; +import { FetchParams, Issue, LinkedIssues } from "../types/github"; +import { StreamlinedComment } from "../types/gpt"; -type FetchParams = { - context: Context; - issueNum?: number; - owner?: string; - repo?: string; -}; +export async function recursivelyFetchLinkedIssues(params: FetchParams) { + const { + context: { logger }, + } = params; -/** - * Because in the eyes of the GitHub api Pull Requests are also - * issues, we can use the same functions for both. - */ + const { linkedIssues, seen, specOrBodies, streamlinedComments } = await fetchLinkedIssues(params); -export async function fetchIssue(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; + logger.info(`Fetching linked issues`, { specOrBodies, streamlinedComments, seen: Array.from(seen) }); - return await octokit.issues - .get({ - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }) - .then(({ data }) => data as Issue); -} - -export async function fetchIssueComments(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; + for (const linkedIssue of linkedIssues) { + const comments = linkedIssue.comments; + if (!comments) { + continue; + } + const streamed = await getAllStreamlinedComments([linkedIssue]); - return await octokit - .paginate(octokit.issues.listComments, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }) - .then((comments) => comments as IssueComments); -} + for (const [key, value] of Object.entries(streamed)) { + if (!streamlinedComments[key]) { + streamlinedComments[key] = value; + continue; + } -export async function fetchLinkedIssues(params: FetchParams, comments?: IssueComments) { - let issueComments: IssueComments | undefined = comments; - const linkedIssues: { - issueNumber: number; - repo: string; - }[] = []; + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; + } - if (!issueComments && !params) { - throw new Error("Either issueComments or params must be provided"); - } + if (!linkedIssue.body) { + continue; + } - if (!issueComments) { - issueComments = await fetchIssueComments(params); + await handleSpec(params, linkedIssue.body, specOrBodies, 
createKey(linkedIssue.url, linkedIssue.issueNumber), seen); } - const { - context: { - logger, - payload: { - repository: { - owner: { login }, - }, - }, - }, - } = params; - - if (!issueComments) { - logger.info("No comments found on issue"); - return linkedIssues; - } + const linkedIssuesKeys = linkedIssues.map((issue) => createKey(issue.url, issue.issueNumber)); + const specAndBodyKeys = Array.from(new Set(Object.keys(specOrBodies).concat(Object.keys(streamlinedComments)).concat(linkedIssuesKeys))); + + for (const key of specAndBodyKeys) { + let comments = streamlinedComments[key]; + if (!comments) { + const [owner, repo, issueNumber] = key.split("/"); + await handleIssue({ + ...params, + owner, + repo, + issueNum: parseInt(issueNumber), + }, streamlinedComments) + + comments = streamlinedComments[key]; + } - for (const comment of issueComments) { - const linkedIssue = idIssueFromComment(login, comment.body); - if (linkedIssue) { - linkedIssues.push(linkedIssue); + for (const comment of comments) { + await handleComment(params, comment, streamlinedComments); } } - return await filterLinkedIssues(linkedIssues); + return { + linkedIssues, + specAndBodies: specOrBodies, + streamlinedComments, + }; } -async function recursivelyFetchLinkedIssues(params: FetchParams, linkedIssues: { issueNumber: number; repo: string }[], depth: number) { - const { - context: { - logger, +async function handleIssue(params: FetchParams, streamlinedComments: Record) { + const { comments: fetchedComments, issue } = await fetchIssueComments(params); + const streamlined = await getAllStreamlinedComments([ + { + body: issue.body || "", + comments: fetchedComments, + issueNumber: issue.number, + owner: issue.repository?.owner?.login || "", + repo: issue.repository?.name || "", + url: issue.url, }, - } = params; - - const contextIssues: { - issueNumber: number; - repo: string; - }[] = []; + ]); - if (depth === 0) { - return contextIssues; + for (const [key, value] of Object.entries(streamlined)) { + const previous = streamlinedComments[key] || []; + streamlinedComments[key] = [...previous, ...value]; } +} - let tempIssues: { - issueNumber: number; - repo: string; - }[] = linkedIssues; - - for (let i = 0; i < depth; i++) { - // we need to keep track of the current issues to fetch the next level of linked issues - const currentIssues = tempIssues; - // empty our temp issues to collect the next level of linked issues - tempIssues = []; - - // i + 1 === current depth - for (const issue of currentIssues) { - const linkedIssues = await fetchLinkedIssues({ context: params.context, owner: issue.repo, issueNum: issue.issueNumber }); - for (const linkedIssue of linkedIssues) { - contextIssues.push(linkedIssue); - tempIssues.push(linkedIssue); +async function handleSpec(params: FetchParams, specOrBody: string, specAndBodies: Record, key: string, seen: Set) { + specAndBodies[key] = specOrBody; + const [owner, repo, issueNumber] = key.split("/"); + const anotherReferencedIssue = idIssueFromComment(owner, specOrBody, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); + + if (anotherReferencedIssue) { + const key = createKey(anotherReferencedIssue.url, anotherReferencedIssue.issueNumber); + if (!seen.has(key)) { + seen.add(key); + const issue = await fetchIssue({ + ...params, + owner: anotherReferencedIssue.owner, + repo: anotherReferencedIssue.repo, + issueNum: anotherReferencedIssue.issueNumber, + }); + const body = issue.body; + if (body) { + specAndBodies[key] = body; } } } - - logger.info(`Recursively fetched 
${contextIssues.length} linked issues`); - - return contextIssues; } -async function filterLinkedIssues(linkedIssues: { issueNumber: number; repo: string }[]) { - const contextIssues: { - issueNumber: number; - repo: string; - }[] = []; - - for (const issue of linkedIssues) { - if (issue && issue.issueNumber && issue.repo) { - contextIssues.push({ - issueNumber: issue.issueNumber, - repo: issue.repo, - }); +async function handleComment(params: FetchParams, comment: StreamlinedComment, streamlinedComments: Record) { + const [, , , , owner, repo, , issueNumber] = comment.issueUrl.split("/"); + const anotherReferencedIssue = idIssueFromComment(owner, comment.body, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); + + if (anotherReferencedIssue) { + const key = createKey(anotherReferencedIssue.url); + const [owner, repo, issueNumber] = key.split("/"); + + if (!streamlinedComments[key]) { + await handleIssue({ + ...params, + owner, + repo, + issueNum: parseInt(issueNumber), + }, streamlinedComments) } } - - return contextIssues; } -export async function getLinkedIssueContextFromComments(context: Context, issueComments: IssueComments, depth = 5) { - // find any linked issues in comments by parsing the comments and enforcing that the - // linked issue is from the same org that the current issue is from - const linkedIssues = await fetchLinkedIssues({ context }, issueComments); - const linkedIssueContext = await recursivelyFetchLinkedIssues({ context }, linkedIssues, depth); +export async function fetchLinkedIssues(params: FetchParams) { + const { comments, issue } = await fetchIssueComments(params); + const issueKey = createKey(issue.url); + const [owner, repo, issueNumber] = issueKey.split("/"); + const linkedIssues: LinkedIssues[] = [ + { + body: issue.body || "", + comments, + issueNumber: parseInt(issueNumber), + owner, + repo, + url: issue.url, + }, + ]; + + const specOrBodies: Record = {}; + specOrBodies[issueKey] = issue.body || ""; - // the conversational history of the linked issues - const linkedIssueComments: IssueComments = []; + const seen = new Set(); + seen.add(issueKey); + + for (const comment of comments) { + let url = ""; + if ("issue_url" in comment) { + url = comment.issue_url; + } else if ("pull_request_url" in comment) { + url = comment.pull_request_url; + } + const linkedIssue = idIssueFromComment(url.split("/")[4], comment.body, { + repo: url.split("/")[5], + issueNum: parseInt(url.split("/")[7]), + context: params.context, + }); + if (linkedIssue) { + const key = createKey(linkedIssue.url, linkedIssue.issueNumber); + seen.add(key); + + const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ + context: params.context, + issueNum: linkedIssue.issueNumber, + owner: linkedIssue.owner, + repo: linkedIssue.repo, + }); - for (const issue of [...linkedIssues, ...linkedIssueContext]) { - const fetched = await fetchIssueComments({ context, issueNum: issue.issueNumber, repo: issue.repo }); - linkedIssueComments.push(...fetched); + specOrBodies[key] = fetchedIssue.body || ""; + linkedIssue.body = fetchedIssue.body || ""; + linkedIssue.comments = fetchedComments; + linkedIssues.push(linkedIssue); + } } - return { linkedIssues, linkedIssueComments }; + return { + streamlinedComments: await getAllStreamlinedComments(linkedIssues), + linkedIssues, + specOrBodies, + seen, + }; } -export function idIssueFromComment(owner?: string, comment?: string | null) { +export function idIssueFromComment(owner?: string, comment?: string | null, params?: 
FetchParams): LinkedIssues | null { if (!comment) { return null; } - if (!owner) { - throw new Error("Owner must be provided when parsing linked issues"); - } - // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag - const urlMatch = comment.match(/https:\/\/github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); - - const linkedIssue: { - issueNumber: number; - repo: string; - } = { - issueNumber: 0, - repo: "", - }; - /** - * If following the rule that only issues from the same org should be included - * then we need to be sure that this format of linked issue is from the same org. - */ + // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag + const urlMatch = comment.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); + const hashMatch = comment.match(/#(\d+)/); + + if (hashMatch) { + return { + owner: owner || params?.owner || "", + repo: params?.repo || "", + issueNumber: parseInt(hashMatch[1]), + url: `https://api.github.com/repos/${params?.owner || owner}/${params?.repo}/issues/${hashMatch[1]}`, + } as LinkedIssues; + } if (urlMatch) { - linkedIssue.issueNumber = parseInt(urlMatch[4]); - linkedIssue.repo = urlMatch[2]; + return { + url: `https://api.github.com/repos/${urlMatch[1]}/${urlMatch[2]}/issues/${urlMatch[4]}`, + owner: owner ?? urlMatch[1], + repo: urlMatch[2], + issueNumber: parseInt(urlMatch[4]), + } as LinkedIssues; } - return linkedIssue; + return null; } -export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: string) { +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { const { logger, octokit } = context; try { const diff = await octokit.pulls.get({ owner: org, repo, - pull_number: parseInt(issue), + pull_number: issue, mediaType: { format: "diff", }, }); return diff.data as unknown as string; - } catch (error) { - logger.error(`Error fetching pull request diff: ${error}`); + } catch (e) { + logger.error(`Error fetching pull request diff: `, { e }); return null; } } + +export async function fetchIssue(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.issues + .get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }) + .then(({ data }) => data as Issue); +} + +export async function fetchIssueComments(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + const issue = await fetchIssue(params); + + let comments; + if (issue.pull_request) { + /** + * With every review comment with a tagged code line we have `diff_hunk` which is great context + * but could easily max our tokens. 
+ */ + comments = await octokit.paginate(octokit.pulls.listReviewComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + pull_number: issueNum || payload.issue.number, + }); + } else { + comments = await octokit.paginate(octokit.issues.listComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + } + + return { + issue, + comments, + }; +} From 8a0a796f05c7fb832131a30da74cda1c82480362 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sun, 14 Jul 2024 15:58:55 +0100 Subject: [PATCH 20/72] chore: refactor chat formatting, remove no diff error log --- src/utils/format-chat-history.ts | 120 +++++++++++++++---------------- src/utils/issue.ts | 3 +- 2 files changed, 57 insertions(+), 66 deletions(-) diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index e895c48..1ec113f 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -2,78 +2,70 @@ import { ChatCompletionMessageParam } from "openai/resources"; import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; import { fetchPullRequestDiff } from "./issue"; +import { createKey } from "../handlers/comments"; -export function formatChatHistory( +export async function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record) { + const convoKeys = Object.keys(streamlined); + const specAndBodyKeys = Object.keys(specAndBodies); + const keys: string[] = Array.from(new Set([...convoKeys, ...specAndBodyKeys])); + const chatHistory: string[] = []; + + for (const key of keys) { + const isCurrentIssue = key === createKey(context.payload.issue.url, context.payload.issue.number); + const block = await createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue); + chatHistory.push(block); + } + + return chatHistory.join(""); +} + +async function createContextBlockSection( context: Context, + key: string, streamlined: Record, specAndBodies: Record, - linkedPulls: Record + isCurrentIssue: boolean ) { - const convoKeys = Object.keys(streamlined); - const specAndBodyKeys = Object.keys(specAndBodies); + const comments = streamlined[key]; + const [org, repo, _issue, issue] = key.split("/"); - const curIssue = { - convo: streamlined[convoKeys[0]], - specOrBody: specAndBodies[specAndBodyKeys[0]], - }; + const issueNumber = parseInt(issue ?? _issue); + const isPull = await fetchPullRequestDiff(context, org, repo, issueNumber); + + if (!issueNumber || isNaN(issueNumber)) { + throw new Error("Issue number is not valid"); + } + + let specHeader = isPull ? `Linked Pull #${issueNumber} Request Body` : `Linked Issue #${issueNumber} Specification`; + if (isCurrentIssue) { + specHeader = isPull ? `Current Pull #${issueNumber} Request Body` : `Current Issue #${issueNumber} Specification`; + } + + const specOrBody = specAndBodies[key]; + const specOrBodyBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader)]; + + const header = isPull ? `Linked Pull #${issueNumber} Request Conversation` : `Linked Issue #${issueNumber} Conversation`; + const repoString = `${org}/${repo} #${issueNumber}`; + const diff = isPull ? 
await fetchPullRequestDiff(context, org, repo, issueNumber) : null; + + const block = [ + specOrBodyBlock.join(""), + createHeader(header, repoString), + createComment({ issue: parseInt(issue), repo, org, comments }), + createFooter(header), + ]; + + if (!isPull) { + return block.join(""); + } - const issueSpecBlock: string[] = [ - createHeader("Project Specification", specAndBodyKeys[0]), - createSpecOrBody(curIssue.specOrBody), - createFooter("Project Specification") - ] - - const issueCommentBlock: string[] = [ - createHeader("Issue Conversation", convoKeys[0]), - createComment({ - issue: parseInt(convoKeys[0].split("/")[2]), - repo: convoKeys[0].split("/")[1], - org: convoKeys[0].split("/")[0], - comments: curIssue.convo, - }), - createFooter("Issue Conversation") + const diffBlock = [ + createHeader("Linked Pull Request Code Diff", repoString), + diff ? diff : "No diff available", + createFooter("Linked Pull Request Code Diff"), ]; - delete convoKeys[0]; - - const linkedContextBlocks = convoKeys.map(async (key) => { - const comments = streamlined[key]; - const [org, repo, _issues, issue] = key.split("/"); - const isPull = linkedPulls[key]; - const specHeader = isPull ? `Linked Pull #${issue} Request Body` : `Linked Issue #${issue} Specification`; - - const specOrBody = specAndBodies[key]; - const specOrBodyBlock = [ - createHeader(specHeader, key), - createSpecOrBody(specOrBody), - createFooter(specHeader) - ] - - const header = isPull ? `Linked Pull #${issue} Request Conversation` : `Linked Issue #${issue} Conversation`; - const repoString = `${org}/${repo} #${issue}`; - const diff = isPull ? await fetchPullRequestDiff(context, org, repo, issue) : null; - - const block = [ - specOrBodyBlock.join(""), - createHeader(header, repoString), - createComment({ issue: parseInt(issue), repo, org, comments }), - createFooter(header) - ] - - if (!isPull) { - return block.join(""); - } - - const diffBlock = [ - createHeader("Linked Pull Request Code Diff", repoString), - diff ? 
diff : "No diff available", - createFooter("Linked Pull Request Code Diff") - ] - - return block.join("") + diffBlock.join(""); - }); - - return issueSpecBlock.join("") + issueCommentBlock.join("") + linkedContextBlocks.join(""); + return block.concat(diffBlock).join(""); } function createHeader(content: string, repoString: string) { @@ -85,7 +77,7 @@ function createFooter(content: string) { } function createComment(comment: StreamlinedComments) { - const comments = [] + const comments = []; for (const c of comment.comments) { comments.push(`${c.id} ${c.user}: ${c.body}\n`); } diff --git a/src/utils/issue.ts b/src/utils/issue.ts index f71a981..4be7a16 100644 --- a/src/utils/issue.ts +++ b/src/utils/issue.ts @@ -216,7 +216,7 @@ export function idIssueFromComment(owner?: string, comment?: string | null, para } export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { - const { logger, octokit } = context; + const { octokit } = context; try { const diff = await octokit.pulls.get({ @@ -229,7 +229,6 @@ export async function fetchPullRequestDiff(context: Context, org: string, repo: }); return diff.data as unknown as string; } catch (e) { - logger.error(`Error fetching pull request diff: `, { e }); return null; } } From 834a570a8fc0e78e1b9590582b8e01c6678bd86a Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Sun, 14 Jul 2024 19:21:08 +0100 Subject: [PATCH 21/72] refactor: optimizing --- src/handlers/ask-gpt.ts | 1 - src/utils/format-chat-history.ts | 57 ++++++-- src/utils/issue.ts | 230 ++++++++++++++++--------------- 3 files changed, 169 insertions(+), 119 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index fd68e8d..e9fcc04 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -15,7 +15,6 @@ export async function askQuestion(context: Context, question: string) { } const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); - const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); return await askGpt(context, formattedChat); diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index 1ec113f..78ea89e 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -1,7 +1,7 @@ import { ChatCompletionMessageParam } from "openai/resources"; import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; -import { fetchPullRequestDiff } from "./issue"; +import { fetchIssue, fetchPullRequestDiff } from "./issue"; import { createKey } from "../handlers/comments"; export async function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record) { @@ -11,12 +11,43 @@ export async function formatChatHistory(context: Context, streamlined: Record fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); + await Promise.allSettled(fetchPromises); - for (const linkedIssue of linkedIssues) { - const comments = linkedIssue.comments; - if (!comments) { - continue; - } - const streamed = await getAllStreamlinedComments([linkedIssue]); + const linkedIssuesKeys = linkedIssues.map((issue) => createKey(issue.url, issue.issueNumber)); + const specAndBodyKeys = Array.from(new Set([...Object.keys(specOrBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); + await processSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), 
seen); - for (const [key, value] of Object.entries(streamed)) { - if (!streamlinedComments[key]) { - streamlinedComments[key] = value; - continue; - } + return { linkedIssues, specAndBodies: specOrBodies, streamlinedComments }; +} - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; - } +function dedupeStreamlinedComments(streamlinedComments: Record) { + for (const key of Object.keys(streamlinedComments)) { + streamlinedComments[key] = streamlinedComments[key].filter( + (comment: StreamlinedComment, index: number, self: StreamlinedComment[]) => index === self.findIndex((t: StreamlinedComment) => t.body === comment.body) + ); + } - if (!linkedIssue.body) { - continue; - } + return streamlinedComments; +} - await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen); +async function fetchCommentsAndHandleSpec( + params: FetchParams, + linkedIssue: LinkedIssues, + streamlinedComments: Record, + specOrBodies: Record, + seen: Set +) { + if (linkedIssue.comments) { + const streamed = await getAllStreamlinedComments([linkedIssue]); + const merged = mergeStreamlinedComments(streamlinedComments, streamed); + streamlinedComments = { ...streamlinedComments, ...merged }; } - const linkedIssuesKeys = linkedIssues.map((issue) => createKey(issue.url, issue.issueNumber)); - const specAndBodyKeys = Array.from(new Set(Object.keys(specOrBodies).concat(Object.keys(streamlinedComments)).concat(linkedIssuesKeys))); + if (linkedIssue.body) { + await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); + } +} - for (const key of specAndBodyKeys) { +async function processSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { + const commentProcessingPromises = keys.map(async (key) => { let comments = streamlinedComments[key]; - if (!comments) { - const [owner, repo, issueNumber] = key.split("/"); - await handleIssue({ - ...params, - owner, - repo, - issueNum: parseInt(issueNumber), - }, streamlinedComments) - - comments = streamlinedComments[key]; + if (!comments || comments.length === 0) { + comments = await fetchAndHandleIssue(key, params, streamlinedComments, seen); } + return Promise.all(comments.map((comment: StreamlinedComment) => handleComment(params, comment, streamlinedComments, seen))); + }); - for (const comment of comments) { - await handleComment(params, comment, streamlinedComments); + await Promise.all(commentProcessingPromises); +} + +function mergeStreamlinedComments(existingComments: Record, newComments: Record) { + if (!existingComments) { + existingComments = {}; + } + for (const [key, value] of Object.entries(newComments)) { + if (!existingComments[key]) { + existingComments[key] = []; } + + const previous = existingComments[key] || []; + existingComments[key] = [...previous, ...value]; } - return { - linkedIssues, - specAndBodies: specOrBodies, - streamlinedComments, - }; + return existingComments; +} + +async function fetchAndHandleIssue( + key: string, + params: FetchParams, + streamlinedComments: Record, + seen: Set +): Promise { + const [owner, repo, issueNumber] = splitKey(key); + await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNumber) }, streamlinedComments, seen); + return streamlinedComments[key] || []; } -async function handleIssue(params: FetchParams, streamlinedComments: Record) { - const { comments: fetchedComments, issue } = await 
fetchIssueComments(params); - const streamlined = await getAllStreamlinedComments([ - { - body: issue.body || "", - comments: fetchedComments, - issueNumber: issue.number, - owner: issue.repository?.owner?.login || "", - repo: issue.repository?.name || "", - url: issue.url, - }, - ]); - - for (const [key, value] of Object.entries(streamlined)) { - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; +async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen?: Set) { + if (alreadySeen && alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { + return; } + const { linkedIssues, seen, specOrBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); + const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); + await Promise.allSettled(fetchPromises); + return mergeStreamlinedComments(streamlinedComments, streamlined); } -async function handleSpec(params: FetchParams, specOrBody: string, specAndBodies: Record, key: string, seen: Set) { +async function handleSpec( + params: FetchParams, + specOrBody: string, + specAndBodies: Record, + key: string, + seen: Set, + streamlinedComments: Record +) { specAndBodies[key] = specOrBody; - const [owner, repo, issueNumber] = key.split("/"); + const [owner, repo, issueNumber] = splitKey(key); const anotherReferencedIssue = idIssueFromComment(owner, specOrBody, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); if (anotherReferencedIssue) { - const key = createKey(anotherReferencedIssue.url, anotherReferencedIssue.issueNumber); - if (!seen.has(key)) { - seen.add(key); - const issue = await fetchIssue({ - ...params, - owner: anotherReferencedIssue.owner, - repo: anotherReferencedIssue.repo, - issueNum: anotherReferencedIssue.issueNumber, - }); - const body = issue.body; - if (body) { - specAndBodies[key] = body; - } + const anotherKey = createKey(anotherReferencedIssue.url, anotherReferencedIssue.issueNumber); + if (seen.has(anotherKey)) { + return; + } + seen.add(anotherKey); + const issue = await fetchIssue({ + ...params, + owner: anotherReferencedIssue.owner, + repo: anotherReferencedIssue.repo, + issueNum: anotherReferencedIssue.issueNumber, + }); + if (issue.body) { + specAndBodies[anotherKey] = issue.body; + } + const [owner, repo, issueNum] = splitKey(anotherKey); + if (!streamlinedComments[anotherKey]) { + await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); + await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue.body || "", specAndBodies, anotherKey, seen, streamlinedComments); } } + + return specAndBodies; } -async function handleComment(params: FetchParams, comment: StreamlinedComment, streamlinedComments: Record) { +async function handleComment(params: FetchParams, comment: StreamlinedComment, streamlinedComments: Record, seen: Set) { const [, , , , owner, repo, , issueNumber] = comment.issueUrl.split("/"); const anotherReferencedIssue = idIssueFromComment(owner, comment.body, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); if (anotherReferencedIssue) { const key = createKey(anotherReferencedIssue.url); - const [owner, repo, issueNumber] = key.split("/"); + const [refOwner, refRepo, refIssueNumber] = splitKey(key); if (!streamlinedComments[key]) { - await handleIssue({ - ...params, - owner, - repo, - issueNum: parseInt(issueNumber), - 
}, streamlinedComments) + await handleIssue({ ...params, owner: refOwner, repo: refRepo, issueNum: parseInt(refIssueNumber) }, streamlinedComments, seen); } } } @@ -129,17 +147,8 @@ async function handleComment(params: FetchParams, comment: StreamlinedComment, s export async function fetchLinkedIssues(params: FetchParams) { const { comments, issue } = await fetchIssueComments(params); const issueKey = createKey(issue.url); - const [owner, repo, issueNumber] = issueKey.split("/"); - const linkedIssues: LinkedIssues[] = [ - { - body: issue.body || "", - comments, - issueNumber: parseInt(issueNumber), - owner, - repo, - url: issue.url, - }, - ]; + const [owner, repo, issueNumber] = splitKey(issueKey); + const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.url }]; const specOrBodies: Record = {}; specOrBodies[issueKey] = issue.body || ""; @@ -154,35 +163,39 @@ export async function fetchLinkedIssues(params: FetchParams) { } else if ("pull_request_url" in comment) { url = comment.pull_request_url; } - const linkedIssue = idIssueFromComment(url.split("/")[4], comment.body, { - repo: url.split("/")[5], - issueNum: parseInt(url.split("/")[7]), + + const key = createKey(url); + const linkedIssue = idIssueFromComment(key.split("/")[0], comment.body, { + repo: key.split("/")[1], + issueNum: parseInt(key.split("/")[2]), context: params.context, }); + if (linkedIssue) { - const key = createKey(linkedIssue.url, linkedIssue.issueNumber); - seen.add(key); + const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); + seen.add(linkedKey); + const [owner, repo, issueNumber] = splitKey(linkedKey); const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ context: params.context, - issueNum: linkedIssue.issueNumber, - owner: linkedIssue.owner, - repo: linkedIssue.repo, + issueNum: parseInt(issueNumber), + owner, + repo, }); - specOrBodies[key] = fetchedIssue.body || ""; + specOrBodies[linkedKey] = fetchedIssue.body || ""; linkedIssue.body = fetchedIssue.body || ""; linkedIssue.comments = fetchedComments; linkedIssues.push(linkedIssue); } } - return { - streamlinedComments: await getAllStreamlinedComments(linkedIssues), - linkedIssues, - specOrBodies, - seen, - }; + return { streamlinedComments: await getAllStreamlinedComments(linkedIssues), linkedIssues, specOrBodies, seen }; +} + +function splitKey(key: string): [string, string, string] { + const parts = key.split("/"); + return [parts[0], parts[1], parts[2]]; } export function idIssueFromComment(owner?: string, comment?: string | null, params?: FetchParams): LinkedIssues | null { @@ -190,7 +203,6 @@ export function idIssueFromComment(owner?: string, comment?: string | null, para return null; } - // the assumption here is that any special GitHub markdown formatting is converted to an anchor tag const urlMatch = comment.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); const hashMatch = comment.match(/#(\d+)/); From c68453093eacb8ea40ba44c50add0b22d1a8bdc0 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 18:30:28 +0100 Subject: [PATCH 22/72] chore: remove env and init tests --- src/handlers/ask-gpt.ts | 4 +- src/plugin.ts | 1 + src/types/env.ts | 10 --- src/types/index.ts | 1 - src/utils/format-chat-history.ts | 3 +- src/worker.ts | 15 +--- tests/__mocks__/db.ts | 97 ++++++++++++++++++++++- tests/__mocks__/handlers.ts | 78 +++++++++++++++++- 
tests/__mocks__/issue-template.ts | 55 +++++++++++++ tests/__mocks__/repo-template.ts | 11 +++ tests/main.test.ts | 127 +++++++++++++++++++++++++++--- 11 files changed, 358 insertions(+), 44 deletions(-) delete mode 100644 src/types/env.ts create mode 100644 tests/__mocks__/issue-template.ts create mode 100644 tests/__mocks__/repo-template.ts diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index e9fcc04..78f6495 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -6,10 +6,8 @@ import { createChatHistory, formatChatHistory } from "../utils/format-chat-histo import { addCommentToIssue } from "./add-comment"; export async function askQuestion(context: Context, question: string) { - const { logger } = context; - if (!question) { - logger.error(`No question provided`); + context.logger.error(`No question provided`); await addCommentToIssue(context, "No question provided", true, "error"); return; } diff --git a/src/plugin.ts b/src/plugin.ts index fbb66f6..3e50108 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -18,6 +18,7 @@ export async function plugin(inputs: PluginInputs) { octokit, logger: new Logs("info"), }; + const { logger, config: { isEnabled }, diff --git a/src/types/env.ts b/src/types/env.ts deleted file mode 100644 index 19bc3c3..0000000 --- a/src/types/env.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Type as T } from "@sinclair/typebox"; -import { StaticDecode } from "@sinclair/typebox"; -import "dotenv/config"; -import { StandardValidator } from "typebox-validators"; - -export const envSchema = T.Object({}); - -export const envValidator = new StandardValidator(envSchema); - -export type Env = StaticDecode; diff --git a/src/types/index.ts b/src/types/index.ts index 6ca5c88..4bcbbe7 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -1,3 +1,2 @@ export * from "./context"; -export * from "./env"; export * from "./plugin-inputs"; diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index 78ea89e..5c326ff 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -85,7 +85,6 @@ async function createContextBlockSection( const header = getCorrectHeaderString(isPull, issueNumber, isCurrentIssue, true); const repoString = `${org}/${repo} #${issueNumber}`; - const diff = isPull ? await fetchPullRequestDiff(context, org, repo, issueNumber) : null; const block = [ specOrBodyBlock.join(""), @@ -100,7 +99,7 @@ async function createContextBlockSection( const diffBlock = [ createHeader("Linked Pull Request Code Diff", repoString), - diff ? diff : "No diff available", + isPull ? 
isPull : "No diff available", createFooter("Linked Pull Request Code Diff"), ]; diff --git a/src/worker.ts b/src/worker.ts index 0f76fd1..aaa5f9b 100644 --- a/src/worker.ts +++ b/src/worker.ts @@ -1,9 +1,9 @@ import { Value } from "@sinclair/typebox/value"; import { plugin } from "./plugin"; -import { Env, envValidator, pluginSettingsSchema, pluginSettingsValidator } from "./types"; +import { pluginSettingsSchema, pluginSettingsValidator } from "./types"; export default { - async fetch(request: Request, env: Env): Promise { + async fetch(request: Request): Promise { try { if (request.method !== "POST") { return new Response(JSON.stringify({ error: `Only POST requests are supported.` }), { @@ -33,17 +33,6 @@ export default { headers: { "content-type": "application/json" }, }); } - if (!envValidator.test(env)) { - const errors: string[] = []; - for (const error of envValidator.errors(env)) { - console.error(error); - errors.push(`${error.path}: ${error.message}`); - } - return new Response(JSON.stringify({ error: `Error: "Invalid environment provided. ${errors.join("; ")}"` }), { - status: 400, - headers: { "content-type": "application/json" }, - }); - } webhookPayload.settings = settings; await plugin(webhookPayload); diff --git a/tests/__mocks__/db.ts b/tests/__mocks__/db.ts index 7df690c..f8bb87d 100644 --- a/tests/__mocks__/db.ts +++ b/tests/__mocks__/db.ts @@ -1,5 +1,5 @@ // cSpell:disable -import { factory, primaryKey } from "@mswjs/data"; +import { factory, nullable, primaryKey } from "@mswjs/data"; /** * Creates an object that can be used as a db to persist data within tests @@ -7,6 +7,101 @@ import { factory, primaryKey } from "@mswjs/data"; export const db = factory({ users: { id: primaryKey(Number), + login: String, + }, + issue: { + id: primaryKey(Number), + assignees: Array, + html_url: String, + repository_url: String, + state: String, + owner: String, + repo: String, + labels: Array, + author_association: String, + body: nullable(String), + closed_at: nullable(Date), + created_at: nullable(Date), + comments: Number, + comments_url: String, + events_url: String, + labels_url: String, + locked: Boolean, + node_id: String, + title: String, + number: Number, + updated_at: Date, + url: String, + user: nullable(Object), + milestone: nullable(Object), + assignee: nullable({ + avatar_url: String, + email: nullable(String), + events_url: String, + followers_url: String, + following_url: String, + gists_url: String, + gravatar_id: nullable(String), + html_url: String, + id: Number, + login: String, + name: nullable(String), + node_id: String, + organizations_url: String, + received_events_url: String, + repos_url: String, + site_admin: Boolean, + starred_at: String, + starred_url: String, + subscriptions_url: String, + type: String, + url: String, + }), + }, + repo: { + id: primaryKey(Number), + html_url: String, name: String, + url: String, + owner: { + login: String, + id: Number, + }, + issues: Array, }, + pull: { + id: primaryKey(Number), + html_url: String, + number: Number, + state: String, + title: String, + user: Object, + body: nullable(String), + repo: String, + owner: String, + author: Object, + assignees: Array, + requested_reviewers: Array, + requested_teams: Array, + labels: Array, + draft: Boolean, + created_at: Date, + updated_at: Date, + closed_at: nullable(Date), + merged_at: nullable(Date), + merge_commit_sha: nullable(String), + assignee: nullable(Object), + milestone: nullable(Object), + head: Object, + base: Object, + _links: Object, + author_association: String, + 
}, + comments: { + id: primaryKey(Number), + issue_number: Number, + owner: String, + repo: String, + comments: Array, + } }); diff --git a/tests/__mocks__/handlers.ts b/tests/__mocks__/handlers.ts index 0d31c3c..d6ca8c3 100644 --- a/tests/__mocks__/handlers.ts +++ b/tests/__mocks__/handlers.ts @@ -1,11 +1,85 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +/* eslint-disable sonarjs/no-duplicate-string */ import { http, HttpResponse } from "msw"; import { db } from "./db"; +import issueTemplate from "./issue-template"; /** * Intercepts the routes and returns a custom payload */ export const handlers = [ - http.get("https://api.ubiquity.com/users", () => { - return HttpResponse.json(db.users.getAll()); + http.get("https://api.openai.com/v1/chat/completions", (params: any) => { + const { messages } = params.body as { messages: string[] }; + + console.log("messages", messages); + + const chat = messages.join("\n"); + + const answer = `This is a mock answer for the chat: ${chat}`; + + return HttpResponse.json({ + choices: [ + { + text: answer, + }, + ], + }); + }), + + // GET https://api.github.com/repos/ubiquity/test-repo/issues/1 + http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number", ({ params: { owner, repo, issue_number } }) => { + return HttpResponse.json(db.issue.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(issue_number) } } })); + }), + + // get repo + http.get("https://api.github.com/repos/:owner/:repo", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { + const item = db.repo.findFirst({ where: { name: { equals: repo }, owner: { login: { equals: owner } } } }); + if (!item) { + return new HttpResponse(null, { status: 404 }); + } + return HttpResponse.json(item); + }), + // get issue + http.get("https://api.github.com/repos/:owner/:repo/issues", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { + return HttpResponse.json(db.issue.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })); + }), + // create issue + http.post("https://api.github.com/repos/:owner/:repo/issues", () => { + const id = db.issue.count() + 1; + const newItem = { ...issueTemplate, id }; + db.issue.create(newItem); + return HttpResponse.json(newItem); + }), + // get repo issues + http.get("https://api.github.com/orgs/:org/repos", ({ params: { org } }: { params: { org: string } }) => { + return HttpResponse.json(db.repo.findMany({ where: { owner: { login: { equals: org } } } })); + }), + // add comment to issue + http.post("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number } }) => { + return HttpResponse.json({ owner, repo, issue_number }); + }), + // list pull requests + http.get("https://api.github.com/repos/:owner/:repo/pulls", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { + return HttpResponse.json(db.pull.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })); + }), + // update a pull request + http.patch("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number } }) => { + return HttpResponse.json({ owner, repo, pull_number }); + }), + // issues list for repo + http.get("https://api.github.com/repos/:owner/:repo/issues", ({ params: { owner, repo } }) => { + return HttpResponse.json(db.issue.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as 
string } } })); + }), + // list issue comments + http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number } }) => { + return HttpResponse.json(db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(issue_number) } } })); + }), + //list review comments + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/comments", ({ params: { owner, repo, pull_number } }) => { + return HttpResponse.json(db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(pull_number) } } })); + }), + // octokit.pulls.get + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number } }) => { + return HttpResponse.json(db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pull_number) } } })); }), ]; diff --git a/tests/__mocks__/issue-template.ts b/tests/__mocks__/issue-template.ts new file mode 100644 index 0000000..c9a74a5 --- /dev/null +++ b/tests/__mocks__/issue-template.ts @@ -0,0 +1,55 @@ +export default { + assignee: { + login: "", + avatar_url: "", + email: "undefined", + events_url: "", + followers_url: "", + following_url: "", + gists_url: "", + gravatar_id: null, + html_url: "", + id: 1, + name: "undefined", + node_id: "", + organizations_url: "", + received_events_url: "", + repos_url: "", + site_admin: false, + starred_at: "", + starred_url: "", + subscriptions_url: "", + type: "", + url: "", + }, + author_association: "NONE", + closed_at: null, + comments: 0, + comments_url: "", + created_at: new Date().toISOString(), + events_url: "", + html_url: "https://github.com/ubiquity/test-repo/issues/1", + id: 1, + labels_url: "", + locked: false, + milestone: null, + node_id: "1", + owner: "ubiquity", + number: 1, + repository_url: "https://github.com/ubiquity/test-repo", + state: "open", + title: "issue", + updated_at: "", + url: "https://api.github.com/repos/ubiquity/test-repo/issues/1", + user: null, + repo: "test-repo", + labels: [ + { + name: "Price: 200 USD", + }, + { + name: "Time: 1h", + }, + ], + body: "body", +}; diff --git a/tests/__mocks__/repo-template.ts b/tests/__mocks__/repo-template.ts new file mode 100644 index 0000000..7bf7be7 --- /dev/null +++ b/tests/__mocks__/repo-template.ts @@ -0,0 +1,11 @@ +export default { + id: 1, + html_url: "", + url: "https://api.github.com/repos/ubiquity/test-repo", + name: "test-repo", + owner: { + login: "ubiquity", + id: 1, + }, + issues: [], +}; diff --git a/tests/main.test.ts b/tests/main.test.ts index 7967004..18d8459 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -1,22 +1,125 @@ import { db } from "./__mocks__/db"; import { server } from "./__mocks__/node"; import usersGet from "./__mocks__/users-get.json"; -import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it } from "@jest/globals"; +import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it, jest } from "@jest/globals"; +import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Context, SupportedEventsU } from "../src/types"; +import { drop } from "@mswjs/data"; +import issueTemplate from "./__mocks__/issue-template"; +import repoTemplate from "./__mocks__/repo-template"; +import { askQuestion } from "../src/handlers/ask-gpt"; -beforeAll(() => server.listen()); -afterEach(() => 
server.resetHandlers()); +type Comments = { + id: number; + user: string; + body: string; +}[]; + + +const octokit = jest.requireActual("@octokit/rest") as any; +jest.mock("openai", () => { + return { + OpenAi: class OpenAi { + constructor() { + return; + } + async chat() { + return { + choices: [ + { + text: "This is a mock answer for the chat", + }, + ], + }; + } + }, + }; +}); + +beforeAll(() => { + server.listen(); +}); +afterEach(() => { + drop(db); + server.resetHandlers(); +}); afterAll(() => server.close()); -describe("User tests", () => { - beforeEach(() => { - for (const item of usersGet) { - db.users.create(item); - } + +// TESTS + +describe("Ask plugin tests", () => { + beforeEach(async () => { + await setupTests(); }); - it("Should fetch all the users", async () => { - const res = await fetch("https://api.ubiquity.com/users"); - const data = await res.json(); - expect(data).toMatchObject(usersGet); + it("should ask GPT a question", async () => { + const ctx = createContext(); + const comments: Comments = [ + { + id: 1, + user: "ubiquity", + body: "This is a test comment", + }, + ]; + + createComments("ubiquity", "test-repo", 1, 1, comments); + const res = await askQuestion(ctx, "What is pi?"); + + expect(res).toBeDefined(); + + expect(res?.answer).toBe("This is a mock answer for the chat"); }); + }); + +// HELPERS + +async function setupTests() { + for (const item of usersGet) { + db.users.create(item); + } + + db.repo.create({ + ...repoTemplate, + }); + + db.issue.create({ + ...issueTemplate, + }); +} + + +function createComments(owner: string, repo: string, id: number, issue_number: number, comments: Comments,) { + db.comments.create({ + id, + issue_number, + owner, + repo, + comments, + }); +} + +function createContext(body = "/gpt what is pi?", isEnabled = true, depth = 5) { + const ctx = { + payload: { + issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["issue"], + sender: db.users.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["sender"], + repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["repository"], + comment: { body } as unknown as Context["payload"]["comment"], + action: "created" as string, + installation: { id: 1 } as unknown as Context["payload"]["installation"], + organization: { login: "ubiquity" } as unknown as Context["payload"]["organization"], + }, + logger: new Logs("debug"), + config: { + isEnabled, + openAi_apiKey: "test", + linkedIssueFetchDepth: depth, + }, + octokit: new octokit.Octokit(), + eventName: "issue_comment.created" as SupportedEventsU, + } as unknown as Context; + + return ctx; +} \ No newline at end of file From c08b9d0aebaae0baf59dd9c7b53e8412ff97526c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 21:28:09 +0100 Subject: [PATCH 23/72] chore: test env setup --- .cspell.json | 1 + tests/__mocks__/db.ts | 12 +++- tests/__mocks__/handlers.ts | 36 ++++++----- tests/__mocks__/users-get.json | 4 +- tests/main.test.ts | 111 +++++++++++++++++++-------------- 5 files changed, 99 insertions(+), 65 deletions(-) diff --git a/.cspell.json b/.cspell.json index b5f9628..b201567 100644 --- a/.cspell.json +++ b/.cspell.json @@ -5,6 +5,7 @@ "useGitignore": true, "language": "en", "words": [ + "mswjs", "Nektos", "dataurl", "devpool", diff --git a/tests/__mocks__/db.ts b/tests/__mocks__/db.ts index f8bb87d..4e090e3 100644 --- a/tests/__mocks__/db.ts +++ b/tests/__mocks__/db.ts @@ 
-99,9 +99,17 @@ export const db = factory({ }, comments: { id: primaryKey(Number), + node_id: String, + url: String, + issue_url: nullable(String), + pull_request_url: nullable(String), + body: nullable(String), + html_url: String, + user: { + login: String, + }, issue_number: Number, owner: String, repo: String, - comments: Array, - } + }, }); diff --git a/tests/__mocks__/handlers.ts b/tests/__mocks__/handlers.ts index d6ca8c3..3d4536e 100644 --- a/tests/__mocks__/handlers.ts +++ b/tests/__mocks__/handlers.ts @@ -8,27 +8,29 @@ import issueTemplate from "./issue-template"; * Intercepts the routes and returns a custom payload */ export const handlers = [ - http.get("https://api.openai.com/v1/chat/completions", (params: any) => { - const { messages } = params.body as { messages: string[] }; - - console.log("messages", messages); - - const chat = messages.join("\n"); - - const answer = `This is a mock answer for the chat: ${chat}`; + http.post("https://api.openai.com/v1/chat/completions", () => { + const answer = `This is a mock answer for the chat`; return HttpResponse.json({ + usage: { + completion_tokens: 150, + prompt_tokens: 1000, + total_tokens: 1150, + }, choices: [ { - text: answer, + message: { + content: answer, + }, }, ], }); }), - // GET https://api.github.com/repos/ubiquity/test-repo/issues/1 http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number", ({ params: { owner, repo, issue_number } }) => { - return HttpResponse.json(db.issue.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(issue_number) } } })); + return HttpResponse.json( + db.issue.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(issue_number) } } }) + ); }), // get repo @@ -72,14 +74,20 @@ export const handlers = [ }), // list issue comments http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number } }) => { - return HttpResponse.json(db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(issue_number) } } })); + return HttpResponse.json( + db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(issue_number) } } }) + ); }), //list review comments http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/comments", ({ params: { owner, repo, pull_number } }) => { - return HttpResponse.json(db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(pull_number) } } })); + return HttpResponse.json( + db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(pull_number) } } }) + ); }), // octokit.pulls.get http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number } }) => { - return HttpResponse.json(db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pull_number) } } })); + return HttpResponse.json( + db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pull_number) } } }) + ); }), ]; diff --git a/tests/__mocks__/users-get.json b/tests/__mocks__/users-get.json index 59f0200..8681c7b 100644 --- 
a/tests/__mocks__/users-get.json +++ b/tests/__mocks__/users-get.json @@ -1,10 +1,10 @@ [ { "id": 1, - "name": "user1" + "login": "ubiquity" }, { "id": 2, - "name": "user2" + "login": "user2" } ] diff --git a/tests/main.test.ts b/tests/main.test.ts index 18d8459..229e281 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -9,32 +9,25 @@ import issueTemplate from "./__mocks__/issue-template"; import repoTemplate from "./__mocks__/repo-template"; import { askQuestion } from "../src/handlers/ask-gpt"; -type Comments = { +type Comment = { id: number; - user: string; + user: { + login: string; + }; body: string; -}[]; - - + url: string; + html_url: string; + owner: string; + repo: string; + issue_number: number; + issue_url?: string; + pull_request_url?: string; +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any const octokit = jest.requireActual("@octokit/rest") as any; -jest.mock("openai", () => { - return { - OpenAi: class OpenAi { - constructor() { - return; - } - async chat() { - return { - choices: [ - { - text: "This is a mock answer for the chat", - }, - ], - }; - } - }, - }; -}); +// eslint-disable-next-line @typescript-eslint/no-explicit-any +jest.requireActual("openai") as any; beforeAll(() => { server.listen(); @@ -45,7 +38,6 @@ afterEach(() => { }); afterAll(() => server.close()); - // TESTS describe("Ask plugin tests", () => { @@ -55,26 +47,56 @@ describe("Ask plugin tests", () => { it("should ask GPT a question", async () => { const ctx = createContext(); - const comments: Comments = [ - { - id: 1, - user: "ubiquity", - body: "This is a test comment", - }, - ]; - - createComments("ubiquity", "test-repo", 1, 1, comments); + const comments = [transformCommentTemplate(1, 1, "First comment", "ubiquity", "test-repo", true)]; + + console.log("comments", comments); + createComments(comments); const res = await askQuestion(ctx, "What is pi?"); expect(res).toBeDefined(); expect(res?.answer).toBe("This is a mock answer for the chat"); }); - }); // HELPERS +function transformCommentTemplate(commentId: number, issueNumber: number, body: string, owner: string, repo: string, isIssue = true) { + const COMMENT_TEMPLATE = { + id: 1, + user: { + login: "ubiquity", + }, + body: "What is pi?", + url: "https://api.github.com/repos/ubiquity/test-repo/issues/comments/1", + html_url: "https://api.github.com/repos/ubiquity/test-repo/issues/1", + owner: "ubiquity", + repo: "test-repo", + issue_number: 1, + }; + + const comment: Comment = { + id: commentId, + user: { + login: COMMENT_TEMPLATE.user.login, + }, + body: body, + url: COMMENT_TEMPLATE.url.replace("1", issueNumber.toString()), + html_url: COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()), + owner: owner, + repo: repo, + issue_number: issueNumber, + }; + + if (isIssue) { + comment.issue_url = COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()); + } else { + comment.pull_request_url = COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()); + } + + return comment; +} + async function setupTests() { for (const item of usersGet) { db.users.create(item); @@ -89,19 +111,16 @@ async function setupTests() { }); } - -function createComments(owner: string, repo: string, id: number, issue_number: number, comments: Comments,) { - db.comments.create({ - id, - issue_number, - owner, - repo, - comments, - }); +function createComments(comments: Comment[]) { + for (const comment of comments) { + db.comments.create({ + ...comment, + }); + } } function createContext(body = "/gpt what is pi?", isEnabled = 
true, depth = 5) { - const ctx = { + return { payload: { issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["issue"], sender: db.users.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["sender"], @@ -120,6 +139,4 @@ function createContext(body = "/gpt what is pi?", isEnabled = true, depth = 5) { octokit: new octokit.Octokit(), eventName: "issue_comment.created" as SupportedEventsU, } as unknown as Context; - - return ctx; -} \ No newline at end of file +} From 93e9cd4e6c1947aeb99515c7753c4653b67dac6d Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 21:35:27 +0100 Subject: [PATCH 24/72] refactor: handle PluginInputs separately for better tests --- src/main.ts | 4 ++-- src/plugin.ts | 12 ++++++++---- src/worker.ts | 4 ++-- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/src/main.ts b/src/main.ts index 15db8fb..78b7e44 100644 --- a/src/main.ts +++ b/src/main.ts @@ -3,7 +3,7 @@ import * as github from "@actions/github"; import { Octokit } from "@octokit/rest"; import { Value } from "@sinclair/typebox/value"; import { pluginSettingsSchema, PluginInputs, pluginSettingsValidator } from "./types"; -import { plugin } from "./plugin"; +import { setupAndRun } from "./plugin"; /** * How a GitHub action executes the plugin. @@ -26,7 +26,7 @@ export async function run() { ref: payload.ref, }; - await plugin(inputs); + await setupAndRun(inputs); return returnDataToKernel(inputs.authToken, inputs.stateId, {}); } diff --git a/src/plugin.ts b/src/plugin.ts index 3e50108..e3143aa 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -5,10 +5,7 @@ import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; import { Logs } from "@ubiquity-dao/ubiquibot-logger"; -/** - * How a worker executes the plugin. - */ -export async function plugin(inputs: PluginInputs) { +export async function setupAndRun(inputs: PluginInputs) { const octokit = new Octokit({ auth: inputs.authToken }); const context: Context = { @@ -19,6 +16,13 @@ export async function plugin(inputs: PluginInputs) { logger: new Logs("info"), }; + return await plugin(context); +} + +/** + * How a worker executes the plugin. 
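+ * It is called with a fully built Context (octokit, config, logger) produced by setupAndRun above, rather than with raw PluginInputs.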
+ */ +export async function plugin(context: Context) { const { logger, config: { isEnabled }, diff --git a/src/worker.ts b/src/worker.ts index aaa5f9b..b0ec7d9 100644 --- a/src/worker.ts +++ b/src/worker.ts @@ -1,5 +1,5 @@ import { Value } from "@sinclair/typebox/value"; -import { plugin } from "./plugin"; +import { setupAndRun } from "./plugin"; import { pluginSettingsSchema, pluginSettingsValidator } from "./types"; export default { @@ -35,7 +35,7 @@ export default { } webhookPayload.settings = settings; - await plugin(webhookPayload); + await setupAndRun(webhookPayload); return new Response(JSON.stringify("OK"), { status: 200, headers: { "content-type": "application/json" } }); } catch (error) { return handleUncaughtError(error); From 042bcc0cb965fa29beed778f3e6040afee649294 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 21:52:30 +0100 Subject: [PATCH 25/72] chore: setup tests --- tests/__mocks__/db.ts | 1 + tests/main.test.ts | 83 +++++++++++++++++++++++++++++++++++++------ 2 files changed, 74 insertions(+), 10 deletions(-) diff --git a/tests/__mocks__/db.ts b/tests/__mocks__/db.ts index 4e090e3..9f25606 100644 --- a/tests/__mocks__/db.ts +++ b/tests/__mocks__/db.ts @@ -107,6 +107,7 @@ export const db = factory({ html_url: String, user: { login: String, + type: String, }, issue_number: Number, owner: String, diff --git a/tests/main.test.ts b/tests/main.test.ts index 229e281..dee2011 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -8,11 +8,16 @@ import { drop } from "@mswjs/data"; import issueTemplate from "./__mocks__/issue-template"; import repoTemplate from "./__mocks__/repo-template"; import { askQuestion } from "../src/handlers/ask-gpt"; +import { plugin } from "../src/plugin"; + +const TEST_QUESTION = "What is pi?"; +const TEST_SLASH_COMMAND = "/gpt what is pi?"; type Comment = { id: number; user: { login: string; + type: string; }; body: string; url: string; @@ -46,17 +51,72 @@ describe("Ask plugin tests", () => { }); it("should ask GPT a question", async () => { - const ctx = createContext(); - const comments = [transformCommentTemplate(1, 1, "First comment", "ubiquity", "test-repo", true)]; - - console.log("comments", comments); - createComments(comments); - const res = await askQuestion(ctx, "What is pi?"); + const ctx = createContext(TEST_SLASH_COMMAND); + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + const res = await askQuestion(ctx, TEST_QUESTION); expect(res).toBeDefined(); expect(res?.answer).toBe("This is a mock answer for the chat"); }); + + it("should not ask GPT a question if plugin is disabled", async () => { + const ctx = createContext(TEST_SLASH_COMMAND, false); + const infoSpy = jest.spyOn(ctx.logger, "info"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + const res = await plugin(ctx); + + expect(res).toBeUndefined(); + expect(infoSpy).toHaveBeenCalledWith("Plugin is disabled. Skipping."); + }); + + it("should not ask GPT a question if comment is from a bot", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const infoSpy = jest.spyOn(ctx.logger, "info"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + if (!ctx.payload.comment.user) return; + ctx.payload.comment.user.type = "Bot"; + const res = await plugin(ctx); + + expect(res).toBeUndefined(); + expect(infoSpy).toHaveBeenCalledWith("Comment is from a bot. 
Skipping."); + }); + + it("should not ask GPT a question if comment does not start with /gpt", async () => { + const ctx = createContext(TEST_QUESTION); + const infoSpy = jest.spyOn(ctx.logger, "info"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + const res = await plugin(ctx); + + expect(res).toBeUndefined(); + expect(infoSpy).toHaveBeenCalledWith("Comment does not start with /gpt. Skipping."); + }); + + it("should not ask GPT a question if no question is provided", async () => { + const ctx = createContext("/gpt"); + const errorSpy = jest.spyOn(ctx.logger, "error"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + const res = await plugin(ctx); + + expect(res).toBeUndefined(); + expect(errorSpy).toHaveBeenCalledWith("No question provided"); + }); + + it("should not ask GPT a question if no OpenAI API key is provided", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const errorSpy = jest.spyOn(ctx.logger, "error"); + + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + ctx.config.openAi_apiKey = ""; + const res = await plugin(ctx); + + expect(res).toBeUndefined(); + expect(errorSpy).toHaveBeenCalledWith("No OpenAI API Key provided"); + }); }); // HELPERS @@ -66,8 +126,9 @@ function transformCommentTemplate(commentId: number, issueNumber: number, body: id: 1, user: { login: "ubiquity", + type: "User", }, - body: "What is pi?", + body: TEST_QUESTION, url: "https://api.github.com/repos/ubiquity/test-repo/issues/comments/1", html_url: "https://api.github.com/repos/ubiquity/test-repo/issues/1", owner: "ubiquity", @@ -79,6 +140,7 @@ function transformCommentTemplate(commentId: number, issueNumber: number, body: id: commentId, user: { login: COMMENT_TEMPLATE.user.login, + type: "User", }, body: body, url: COMMENT_TEMPLATE.url.replace("1", issueNumber.toString()), @@ -119,13 +181,14 @@ function createComments(comments: Comment[]) { } } -function createContext(body = "/gpt what is pi?", isEnabled = true, depth = 5) { +function createContext(body = TEST_SLASH_COMMAND, isEnabled = true, depth = 5) { + const user = db.users.findFirst({ where: { id: { equals: 1 } } }); return { payload: { issue: db.issue.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["issue"], - sender: db.users.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["sender"], + sender: user, repository: db.repo.findFirst({ where: { id: { equals: 1 } } }) as unknown as Context["payload"]["repository"], - comment: { body } as unknown as Context["payload"]["comment"], + comment: { body, user: user } as unknown as Context["payload"]["comment"], action: "created" as string, installation: { id: 1 } as unknown as Context["payload"]["installation"], organization: { login: "ubiquity" } as unknown as Context["payload"]["organization"], From 6ce964de9aa44cb7af4f078c89ca06db5518d7c5 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:05:04 +0100 Subject: [PATCH 26/72] chore: chat history and linked context tests --- src/utils/format-chat-history.ts | 4 + tests/__mocks__/issue-template.ts | 2 +- tests/main.test.ts | 138 +++++++++++++++++++++++++++--- 3 files changed, 133 insertions(+), 11 deletions(-) diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index 5c326ff..b7d1513 100644 --- a/src/utils/format-chat-history.ts +++ 
b/src/utils/format-chat-history.ts @@ -116,6 +116,10 @@ function createFooter(content: string) { function createComment(comment: StreamlinedComments) { const comments = []; + + // filter dupes + comment.comments = comment.comments.filter((c, i, a) => a.findIndex((cc) => cc.id === c.id) === i); + for (const c of comment.comments) { comments.push(`${c.id} ${c.user}: ${c.body}\n`); } diff --git a/tests/__mocks__/issue-template.ts b/tests/__mocks__/issue-template.ts index c9a74a5..d8f682c 100644 --- a/tests/__mocks__/issue-template.ts +++ b/tests/__mocks__/issue-template.ts @@ -51,5 +51,5 @@ export default { name: "Time: 1h", }, ], - body: "body", + body: "This is a demo spec for a demo task just perfect for testing.", }; diff --git a/tests/main.test.ts b/tests/main.test.ts index dee2011..a1373a2 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -12,6 +12,7 @@ import { plugin } from "../src/plugin"; const TEST_QUESTION = "What is pi?"; const TEST_SLASH_COMMAND = "/gpt what is pi?"; +const LOG_CALLER = "_Logs."; type Comment = { id: number; @@ -65,9 +66,8 @@ describe("Ask plugin tests", () => { const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - const res = await plugin(ctx); + await plugin(ctx); - expect(res).toBeUndefined(); expect(infoSpy).toHaveBeenCalledWith("Plugin is disabled. Skipping."); }); @@ -78,9 +78,8 @@ describe("Ask plugin tests", () => { createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); if (!ctx.payload.comment.user) return; ctx.payload.comment.user.type = "Bot"; - const res = await plugin(ctx); + await plugin(ctx); - expect(res).toBeUndefined(); expect(infoSpy).toHaveBeenCalledWith("Comment is from a bot. Skipping."); }); @@ -89,9 +88,8 @@ describe("Ask plugin tests", () => { const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - const res = await plugin(ctx); + await plugin(ctx); - expect(res).toBeUndefined(); expect(infoSpy).toHaveBeenCalledWith("Comment does not start with /gpt. Skipping."); }); @@ -100,9 +98,8 @@ describe("Ask plugin tests", () => { const errorSpy = jest.spyOn(ctx.logger, "error"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - const res = await plugin(ctx); + await plugin(ctx); - expect(res).toBeUndefined(); expect(errorSpy).toHaveBeenCalledWith("No question provided"); }); @@ -112,11 +109,118 @@ describe("Ask plugin tests", () => { createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); ctx.config.openAi_apiKey = ""; - const res = await plugin(ctx); + await plugin(ctx); - expect(res).toBeUndefined(); expect(errorSpy).toHaveBeenCalledWith("No OpenAI API Key provided"); }); + + it("should construct the chat history correctly", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const infoSpy = jest.spyOn(ctx.logger, "info"); + createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); + await plugin(ctx); + + expect(infoSpy).toHaveBeenCalledTimes(3); + + const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === + +This is a demo spec for a demo task just perfect for testing. +=== End Current Issue #1 Specification === + +=== Current Issue #1 Conversation === ubiquity/test-repo #1 === + +1 ubiquity: What is pi? 
+=== End Current Issue #1 Conversation ===\n +`; + + expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: what is pi?"); + expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { + caller: LOG_CALLER, + chat: [ + { + role: "system", + content: + "You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests.\n Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements.\n The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked.\n Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.", + }, + { + role: "user", + content: prompt, + }, + ], + }); + + expect(infoSpy).toHaveBeenNthCalledWith(3, "Answer: This is a mock answer for the chat", { + caller: LOG_CALLER, + tokenUsage: { + input: 1000, + output: 150, + total: 1150, + }, + }); + }); + + it("should collect the linked issues correctly", async () => { + const ctx = createContext(TEST_SLASH_COMMAND); + const infoSpy = jest.spyOn(ctx.logger, "info"); + createComments([ + transformCommentTemplate(1, 1, "More context here #2", "ubiquity", "test-repo", true), + transformCommentTemplate(2, 1, TEST_QUESTION, "ubiquity", "test-repo", true), + transformCommentTemplate(3, 2, "More context here #3", "ubiquity", "test-repo", true), + transformCommentTemplate(4, 3, "Just a comment", "ubiquity", "test-repo", true), + ]); + + await plugin(ctx); + + expect(infoSpy).toHaveBeenCalledTimes(3); + + expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: what is pi?"); + + const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === + +This is a demo spec for a demo task just perfect for testing. +=== End Current Issue #1 Specification === + +=== Current Issue #1 Conversation === ubiquity/test-repo #1 === + +1 ubiquity: More context here #2 +2 ubiquity: What is pi? 
+=== End Current Issue #1 Conversation === + +=== Linked Issue #2 Specification === ubiquity/test-repo/2 === + +Related to issue #3 +=== End Linked Issue #2 Specification === + +=== Linked Issue #2 Conversation === ubiquity/test-repo #2 === + +3 ubiquity: More context here #3 +=== End Linked Issue #2 Conversation === + +=== Linked Issue #3 Specification === ubiquity/test-repo/3 === + +Just another issue +=== End Linked Issue #3 Specification === + +=== Linked Issue #3 Conversation === ubiquity/test-repo #3 === + +4 ubiquity: Just a comment +=== End Linked Issue #3 Conversation ===\n +`; + + expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { + caller: LOG_CALLER, + chat: [ + { + role: "system", + content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests.\n Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements.\n The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked.\n Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, + }, + { + role: "user", + content: prompt, + }, + ], + }); + }); }); // HELPERS @@ -171,6 +275,20 @@ async function setupTests() { db.issue.create({ ...issueTemplate, }); + + db.issue.create({ + ...issueTemplate, + id: 2, + number: 2, + body: "Related to issue #3", + }); + + db.issue.create({ + ...issueTemplate, + id: 3, + number: 3, + body: "Just another issue", + }); } function createComments(comments: Comment[]) { From 64bf785626c11d6c818f716b6f958c10e3a4038f Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:12:55 +0100 Subject: [PATCH 27/72] chore: remove depth --- src/types/plugin-inputs.ts | 1 - tests/main.test.ts | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index ca3e163..03a608e 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -22,7 +22,6 @@ export interface PluginInputs Date: Tue, 23 Jul 2024 23:17:34 +0100 Subject: [PATCH 28/72] chore: diff comments from logs --- src/handlers/add-comment.ts | 12 +----------- src/handlers/ask-gpt.ts | 12 ++++++------ src/plugin.ts | 4 ++-- tests/main.test.ts | 2 +- 4 files changed, 10 insertions(+), 20 deletions(-) diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index d8410de..a6d676b 100644 --- a/src/handlers/add-comment.ts +++ b/src/handlers/add-comment.ts @@ -1,20 +1,10 @@ import { Context } from "../types/context"; -const diffStyles = { - warning: "```diff\n! 
", - error: "```diff\n- ", - success: "```diff\n+ ", - info: "```diff\n# ", -}; -export async function addCommentToIssue(context: Context, message: string, diff = false, diffStyle?: keyof typeof diffStyles) { +export async function addCommentToIssue(context: Context, message: string) { const { payload } = context; const issueNumber = payload.issue.number; - if (diff && diffStyle) { - message = `${diffStyles[diffStyle]}${message}\n\`\`\``; - } - try { await context.octokit.issues.createComment({ owner: payload.repository.owner.login, diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 78f6495..1c96b45 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -7,8 +7,8 @@ import { addCommentToIssue } from "./add-comment"; export async function askQuestion(context: Context, question: string) { if (!question) { - context.logger.error(`No question provided`); - await addCommentToIssue(context, "No question provided", true, "error"); + const log = context.logger.error(`No question provided`); + await addCommentToIssue(context, log?.logMessage.diff as string); return; } @@ -25,8 +25,8 @@ export async function askGpt(context: Context, formattedChat: string) { } = context; if (!openAi_apiKey) { - logger.error(`No OpenAI API Key provided`); - await addCommentToIssue(context, "No OpenAI API Key detected!", true, "error"); // TOO confirm correct style here + const log = logger.error(`No OpenAI API Key detected!`); + await addCommentToIssue(context, log?.logMessage.diff as string); // TOO confirm correct style here return; } @@ -43,8 +43,8 @@ export async function askGpt(context: Context, formattedChat: string) { }); if (!res.choices) { - logger.error(`No response from OpenAI`); - await addCommentToIssue(context, "No response from OpenAI", true, "error"); + const log = logger.error(`No response from OpenAI`); + await addCommentToIssue(context, log?.logMessage.diff as string); return; } diff --git a/src/plugin.ts b/src/plugin.ts index e3143aa..486a596 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -42,8 +42,8 @@ export async function plugin(context: Context) { } if (!isEnabled) { - logger.info("Plugin is disabled. Skipping."); - await addCommentToIssue(context, "The /gpt command is disabled. Enable it in the plugin settings.", true, "warning"); + const log = logger.info("The /gpt command is disabled. Enable it in the plugin settings."); + await addCommentToIssue(context, log?.logMessage.diff as string); return; } diff --git a/tests/main.test.ts b/tests/main.test.ts index 64086a2..b606cc0 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -68,7 +68,7 @@ describe("Ask plugin tests", () => { createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); await plugin(ctx); - expect(infoSpy).toHaveBeenCalledWith("Plugin is disabled. Skipping."); + expect(infoSpy).toHaveBeenCalledWith("The /gpt command is disabled. 
Enable it in the plugin settings."); }); it("should not ask GPT a question if comment is from a bot", async () => { From 053856ece3f621dfff581c5a7514386d73c47c26 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:21:31 +0100 Subject: [PATCH 29/72] chore: fix test --- src/handlers/add-comment.ts | 1 - tests/main.test.ts | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index a6d676b..56068f9 100644 --- a/src/handlers/add-comment.ts +++ b/src/handlers/add-comment.ts @@ -1,6 +1,5 @@ import { Context } from "../types/context"; - export async function addCommentToIssue(context: Context, message: string) { const { payload } = context; const issueNumber = payload.issue.number; diff --git a/tests/main.test.ts b/tests/main.test.ts index b606cc0..8b6333b 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -111,7 +111,8 @@ describe("Ask plugin tests", () => { ctx.config.openAi_apiKey = ""; await plugin(ctx); - expect(errorSpy).toHaveBeenCalledWith("No OpenAI API Key provided"); + expect(errorSpy).toHaveBeenNthCalledWith(1, "No OpenAI API Key detected!"); + expect(errorSpy).toHaveBeenNthCalledWith(2, "No response from OpenAI"); }); it("should construct the chat history correctly", async () => { From 7bd0557990cea6c618c4ffd4732be6d89417ceef Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:24:37 +0100 Subject: [PATCH 30/72] ci: knip --- package.json | 5 ++--- src/types/github.ts | 2 -- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 1f50699..4d1ceef 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "knip": "knip --config .github/knip.ts", "knip-ci": "knip --no-exit-code --reporter json --config .github/knip.ts", "prepare": "husky install", - "test": "jest --setupFiles dotenv/config --coverage", + "test": "jest --coverage", "worker": "wrangler dev --env dev --port 4000" }, "keywords": [ @@ -33,7 +33,6 @@ "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", "@ubiquity-dao/ubiquibot-logger": "^1.3.0", - "dotenv": "16.4.5", "openai": "^4.52.7", "typebox-validators": "0.3.5" }, @@ -82,4 +81,4 @@ ] }, "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610" -} +} \ No newline at end of file diff --git a/src/types/github.ts b/src/types/github.ts index 2b7d857..6448272 100644 --- a/src/types/github.ts +++ b/src/types/github.ts @@ -4,8 +4,6 @@ import { Context } from "./context"; export type Issue = RestEndpointMethodTypes["issues"]["get"]["response"]["data"]; export type IssueComments = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"]; export type ReviewComments = RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"]; -export type IssueComment = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"][0]; -export type ReviewComment = RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"][0]; export type FetchParams = { context: Context; From 442dacb60e85fd1c1080ac8ef96226cea55b12ed Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:26:27 +0100 Subject: [PATCH 31/72] chore: update .lock file --- yarn.lock | 5 ----- 1 file changed, 5 deletions(-) diff --git a/yarn.lock b/yarn.lock index b6d449b..cf586c6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3014,11 +3014,6 @@ dot-prop@^5.1.0: 
dependencies: is-obj "^2.0.0" -dotenv@16.4.5: - version "16.4.5" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" - integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== - easy-table@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/easy-table/-/easy-table-1.2.0.tgz#ba9225d7138fee307bfd4f0b5bc3c04bdc7c54eb" From 9f39f24a5b38c44b7a41230fac5a57adf7f7486a Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:33:27 +0100 Subject: [PATCH 32/72] chore: fix key --- src/handlers/comments.ts | 3 --- src/utils/issue.ts | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index 5c33c7b..e5fbad0 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -30,9 +30,6 @@ export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { export function createKey(issueUrl: string, issue?: number) { if (!issueUrl) throw new Error("issueUrl is required"); - if (issueUrl.includes("undefined")) { - throw new Error("issueUrl is not valid"); - } const [, , , , issueOrg, issueRepo, , issueNumber] = issueUrl.split("/"); return `${issueOrg}/${issueRepo}/${issueNumber || issue}`; diff --git a/src/utils/issue.ts b/src/utils/issue.ts index fdea564..831ece2 100644 --- a/src/utils/issue.ts +++ b/src/utils/issue.ts @@ -84,7 +84,7 @@ async function fetchAndHandleIssue( } async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen?: Set) { - if (alreadySeen && alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { + if (alreadySeen && alreadySeen.has(createKey(`////${params.owner}/${params.repo}/${params.issueNum}`))) { return; } const { linkedIssues, seen, specOrBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); From 8b682c81556e957a42bcc2108ae24a04e1f7f9ff Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:41:32 +0100 Subject: [PATCH 33/72] chore: code cleanup --- package.json | 2 +- src/handlers/ask-gpt.ts | 2 +- src/utils/format-chat-history.ts | 2 +- src/utils/issue-fetching.ts | 159 +++++++++++++++++++++ src/utils/issue-handling.ts | 83 +++++++++++ src/utils/issue.ts | 236 +------------------------------ 6 files changed, 249 insertions(+), 235 deletions(-) create mode 100644 src/utils/issue-fetching.ts create mode 100644 src/utils/issue-handling.ts diff --git a/package.json b/package.json index 4d1ceef..75753b8 100644 --- a/package.json +++ b/package.json @@ -81,4 +81,4 @@ ] }, "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610" -} \ No newline at end of file +} diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 1c96b45..94a22d1 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,9 +1,9 @@ import OpenAI from "openai"; import { Context } from "../types"; -import { recursivelyFetchLinkedIssues } from "../utils/issue"; import { createChatHistory, formatChatHistory } from "../utils/format-chat-history"; import { addCommentToIssue } from "./add-comment"; +import { recursivelyFetchLinkedIssues } from "../utils/issue-fetching"; export async function askQuestion(context: Context, question: string) { if (!question) { diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index b7d1513..38a7ad1 100644 --- 
a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -1,8 +1,8 @@ import { ChatCompletionMessageParam } from "openai/resources"; import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; -import { fetchIssue, fetchPullRequestDiff } from "./issue"; import { createKey } from "../handlers/comments"; +import { fetchPullRequestDiff, fetchIssue } from "./issue-fetching"; export async function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record) { const convoKeys = Object.keys(streamlined); diff --git a/src/utils/issue-fetching.ts b/src/utils/issue-fetching.ts new file mode 100644 index 0000000..c7530ea --- /dev/null +++ b/src/utils/issue-fetching.ts @@ -0,0 +1,159 @@ +import { createKey, getAllStreamlinedComments } from "../handlers/comments"; +import { Context } from "../types"; +import { FetchParams, Issue, LinkedIssues } from "../types/github"; +import { StreamlinedComment } from "../types/gpt"; +import { dedupeStreamlinedComments, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; +import { handleIssue, handleSpec, handleSpecAndBodyKeys } from "./issue-handling"; + +export async function recursivelyFetchLinkedIssues(params: FetchParams) { + const { linkedIssues, seen, specOrBodies, streamlinedComments } = await fetchLinkedIssues(params); + + const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); + await Promise.allSettled(fetchPromises); + + const linkedIssuesKeys = linkedIssues.map((issue) => createKey(issue.url, issue.issueNumber)); + const specAndBodyKeys = Array.from(new Set([...Object.keys(specOrBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); + await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); + + return { linkedIssues, specAndBodies: specOrBodies, streamlinedComments }; +} + +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { + const { octokit } = context; + + try { + const diff = await octokit.pulls.get({ + owner: org, + repo, + pull_number: issue, + mediaType: { + format: "diff", + }, + }); + return diff.data as unknown as string; + } catch (e) { + return null; + } +} + +export async function fetchIssue(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + return await octokit.issues + .get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }) + .then(({ data }) => data as Issue); +} + +export async function fetchIssueComments(params: FetchParams) { + const { octokit, payload } = params.context; + const { issueNum, owner, repo } = params; + + const issue = await fetchIssue(params); + + let comments; + if (issue.pull_request) { + /** + * With every review comment with a tagged code line we have `diff_hunk` which is great context + * but could easily max our tokens. 
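+   * Pull requests therefore use the paginated pulls.listReviewComments endpoint, while plain issues fall back to issues.listComments.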
+ */ + comments = await octokit.paginate(octokit.pulls.listReviewComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + pull_number: issueNum || payload.issue.number, + }); + } else { + comments = await octokit.paginate(octokit.issues.listComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + } + + return { + issue, + comments, + }; +} + +export async function fetchLinkedIssues(params: FetchParams) { + const { comments, issue } = await fetchIssueComments(params); + const issueKey = createKey(issue.url); + const [owner, repo, issueNumber] = splitKey(issueKey); + const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.url }]; + + const specOrBodies: Record = {}; + specOrBodies[issueKey] = issue.body || ""; + + const seen = new Set(); + seen.add(issueKey); + + for (const comment of comments) { + let url = ""; + if ("issue_url" in comment) { + url = comment.issue_url; + } else if ("pull_request_url" in comment) { + url = comment.pull_request_url; + } + + const key = createKey(url); + const linkedIssue = idIssueFromComment(key.split("/")[0], comment.body, { + repo: key.split("/")[1], + issueNum: parseInt(key.split("/")[2]), + context: params.context, + }); + + if (linkedIssue) { + const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); + seen.add(linkedKey); + const [owner, repo, issueNumber] = splitKey(linkedKey); + + const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ + context: params.context, + issueNum: parseInt(issueNumber), + owner, + repo, + }); + + specOrBodies[linkedKey] = fetchedIssue.body || ""; + linkedIssue.body = fetchedIssue.body || ""; + linkedIssue.comments = fetchedComments; + linkedIssues.push(linkedIssue); + } + } + + return { streamlinedComments: await getAllStreamlinedComments(linkedIssues), linkedIssues, specOrBodies, seen }; +} + +export async function fetchAndHandleIssue( + key: string, + params: FetchParams, + streamlinedComments: Record, + seen: Set +): Promise { + const [owner, repo, issueNumber] = splitKey(key); + await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNumber) }, streamlinedComments, seen); + return streamlinedComments[key] || []; +} + +export async function fetchCommentsAndHandleSpec( + params: FetchParams, + linkedIssue: LinkedIssues, + streamlinedComments: Record, + specOrBodies: Record, + seen: Set +) { + if (linkedIssue.comments) { + const streamed = await getAllStreamlinedComments([linkedIssue]); + const merged = mergeStreamlinedComments(streamlinedComments, streamed); + streamlinedComments = { ...streamlinedComments, ...merged }; + } + + if (linkedIssue.body) { + await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); + } +} diff --git a/src/utils/issue-handling.ts b/src/utils/issue-handling.ts new file mode 100644 index 0000000..0371e81 --- /dev/null +++ b/src/utils/issue-handling.ts @@ -0,0 +1,83 @@ +import { createKey } from "../handlers/comments"; +import { FetchParams } from "../types/github"; +import { StreamlinedComment } from "../types/gpt"; +import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; +import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, fetchCommentsAndHandleSpec } from "./issue-fetching"; + +export async function 
handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen?: Set) { + if (alreadySeen && alreadySeen.has(createKey(`////${params.owner}/${params.repo}/${params.issueNum}`))) { + return; + } + const { linkedIssues, seen, specOrBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); + const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); + await Promise.allSettled(fetchPromises); + return mergeStreamlinedComments(streamlinedComments, streamlined); +} + +export async function handleSpec( + params: FetchParams, + specOrBody: string, + specAndBodies: Record, + key: string, + seen: Set, + streamlinedComments: Record +) { + specAndBodies[key] = specOrBody; + const [owner, repo, issueNumber] = splitKey(key); + const anotherReferencedIssue = idIssueFromComment(owner, specOrBody, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); + + if (anotherReferencedIssue) { + const anotherKey = createKey(anotherReferencedIssue.url, anotherReferencedIssue.issueNumber); + if (seen.has(anotherKey)) { + return; + } + seen.add(anotherKey); + const issue = await fetchIssue({ + ...params, + owner: anotherReferencedIssue.owner, + repo: anotherReferencedIssue.repo, + issueNum: anotherReferencedIssue.issueNumber, + }); + if (issue.body) { + specAndBodies[anotherKey] = issue.body; + } + const [owner, repo, issueNum] = splitKey(anotherKey); + if (!streamlinedComments[anotherKey]) { + await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); + await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue.body || "", specAndBodies, anotherKey, seen, streamlinedComments); + } + } + + return specAndBodies; +} + +export async function handleComment( + params: FetchParams, + comment: StreamlinedComment, + streamlinedComments: Record, + seen: Set +) { + const [, , , , owner, repo, , issueNumber] = comment.issueUrl.split("/"); + const anotherReferencedIssue = idIssueFromComment(owner, comment.body, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); + + if (anotherReferencedIssue) { + const key = createKey(anotherReferencedIssue.url); + const [refOwner, refRepo, refIssueNumber] = splitKey(key); + + if (!streamlinedComments[key]) { + await handleIssue({ ...params, owner: refOwner, repo: refRepo, issueNum: parseInt(refIssueNumber) }, streamlinedComments, seen); + } + } +} + +export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { + const commentProcessingPromises = keys.map(async (key) => { + let comments = streamlinedComments[key]; + if (!comments || comments.length === 0) { + comments = await fetchAndHandleIssue(key, params, streamlinedComments, seen); + } + return Promise.all(comments.map((comment: StreamlinedComment) => handleComment(params, comment, streamlinedComments, seen))); + }); + + await Promise.all(commentProcessingPromises); +} diff --git a/src/utils/issue.ts b/src/utils/issue.ts index 831ece2..2eb0228 100644 --- a/src/utils/issue.ts +++ b/src/utils/issue.ts @@ -1,22 +1,7 @@ -import { createKey, getAllStreamlinedComments } from "../handlers/comments"; -import { Context } from "../types"; -import { FetchParams, Issue, LinkedIssues } from "../types/github"; +import { FetchParams, LinkedIssues } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; -export async function recursivelyFetchLinkedIssues(params: FetchParams) { 
- const { linkedIssues, seen, specOrBodies, streamlinedComments } = await fetchLinkedIssues(params); - - const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); - await Promise.allSettled(fetchPromises); - - const linkedIssuesKeys = linkedIssues.map((issue) => createKey(issue.url, issue.issueNumber)); - const specAndBodyKeys = Array.from(new Set([...Object.keys(specOrBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); - await processSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); - - return { linkedIssues, specAndBodies: specOrBodies, streamlinedComments }; -} - -function dedupeStreamlinedComments(streamlinedComments: Record) { +export function dedupeStreamlinedComments(streamlinedComments: Record) { for (const key of Object.keys(streamlinedComments)) { streamlinedComments[key] = streamlinedComments[key].filter( (comment: StreamlinedComment, index: number, self: StreamlinedComment[]) => index === self.findIndex((t: StreamlinedComment) => t.body === comment.body) @@ -26,37 +11,7 @@ function dedupeStreamlinedComments(streamlinedComments: Record, - specOrBodies: Record, - seen: Set -) { - if (linkedIssue.comments) { - const streamed = await getAllStreamlinedComments([linkedIssue]); - const merged = mergeStreamlinedComments(streamlinedComments, streamed); - streamlinedComments = { ...streamlinedComments, ...merged }; - } - - if (linkedIssue.body) { - await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); - } -} - -async function processSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { - const commentProcessingPromises = keys.map(async (key) => { - let comments = streamlinedComments[key]; - if (!comments || comments.length === 0) { - comments = await fetchAndHandleIssue(key, params, streamlinedComments, seen); - } - return Promise.all(comments.map((comment: StreamlinedComment) => handleComment(params, comment, streamlinedComments, seen))); - }); - - await Promise.all(commentProcessingPromises); -} - -function mergeStreamlinedComments(existingComments: Record, newComments: Record) { +export function mergeStreamlinedComments(existingComments: Record, newComments: Record) { if (!existingComments) { existingComments = {}; } @@ -72,128 +27,7 @@ function mergeStreamlinedComments(existingComments: Record, - seen: Set -): Promise { - const [owner, repo, issueNumber] = splitKey(key); - await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNumber) }, streamlinedComments, seen); - return streamlinedComments[key] || []; -} - -async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen?: Set) { - if (alreadySeen && alreadySeen.has(createKey(`////${params.owner}/${params.repo}/${params.issueNum}`))) { - return; - } - const { linkedIssues, seen, specOrBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); - const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); - await Promise.allSettled(fetchPromises); - return mergeStreamlinedComments(streamlinedComments, streamlined); -} - -async function handleSpec( - params: FetchParams, - specOrBody: string, - specAndBodies: Record, - key: string, - seen: Set, - streamlinedComments: Record -) { - specAndBodies[key] = specOrBody; - 
const [owner, repo, issueNumber] = splitKey(key); - const anotherReferencedIssue = idIssueFromComment(owner, specOrBody, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); - - if (anotherReferencedIssue) { - const anotherKey = createKey(anotherReferencedIssue.url, anotherReferencedIssue.issueNumber); - if (seen.has(anotherKey)) { - return; - } - seen.add(anotherKey); - const issue = await fetchIssue({ - ...params, - owner: anotherReferencedIssue.owner, - repo: anotherReferencedIssue.repo, - issueNum: anotherReferencedIssue.issueNumber, - }); - if (issue.body) { - specAndBodies[anotherKey] = issue.body; - } - const [owner, repo, issueNum] = splitKey(anotherKey); - if (!streamlinedComments[anotherKey]) { - await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); - await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue.body || "", specAndBodies, anotherKey, seen, streamlinedComments); - } - } - - return specAndBodies; -} - -async function handleComment(params: FetchParams, comment: StreamlinedComment, streamlinedComments: Record, seen: Set) { - const [, , , , owner, repo, , issueNumber] = comment.issueUrl.split("/"); - const anotherReferencedIssue = idIssueFromComment(owner, comment.body, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); - - if (anotherReferencedIssue) { - const key = createKey(anotherReferencedIssue.url); - const [refOwner, refRepo, refIssueNumber] = splitKey(key); - - if (!streamlinedComments[key]) { - await handleIssue({ ...params, owner: refOwner, repo: refRepo, issueNum: parseInt(refIssueNumber) }, streamlinedComments, seen); - } - } -} - -export async function fetchLinkedIssues(params: FetchParams) { - const { comments, issue } = await fetchIssueComments(params); - const issueKey = createKey(issue.url); - const [owner, repo, issueNumber] = splitKey(issueKey); - const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.url }]; - - const specOrBodies: Record = {}; - specOrBodies[issueKey] = issue.body || ""; - - const seen = new Set(); - seen.add(issueKey); - - for (const comment of comments) { - let url = ""; - if ("issue_url" in comment) { - url = comment.issue_url; - } else if ("pull_request_url" in comment) { - url = comment.pull_request_url; - } - - const key = createKey(url); - const linkedIssue = idIssueFromComment(key.split("/")[0], comment.body, { - repo: key.split("/")[1], - issueNum: parseInt(key.split("/")[2]), - context: params.context, - }); - - if (linkedIssue) { - const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); - seen.add(linkedKey); - const [owner, repo, issueNumber] = splitKey(linkedKey); - - const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ - context: params.context, - issueNum: parseInt(issueNumber), - owner, - repo, - }); - - specOrBodies[linkedKey] = fetchedIssue.body || ""; - linkedIssue.body = fetchedIssue.body || ""; - linkedIssue.comments = fetchedComments; - linkedIssues.push(linkedIssue); - } - } - - return { streamlinedComments: await getAllStreamlinedComments(linkedIssues), linkedIssues, specOrBodies, seen }; -} - -function splitKey(key: string): [string, string, string] { +export function splitKey(key: string): [string, string, string] { const parts = key.split("/"); return [parts[0], parts[1], parts[2]]; } @@ -226,65 +60,3 @@ export function idIssueFromComment(owner?: string, comment?: string | null, para return 
null; } - -export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { - const { octokit } = context; - - try { - const diff = await octokit.pulls.get({ - owner: org, - repo, - pull_number: issue, - mediaType: { - format: "diff", - }, - }); - return diff.data as unknown as string; - } catch (e) { - return null; - } -} - -export async function fetchIssue(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; - - return await octokit.issues - .get({ - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }) - .then(({ data }) => data as Issue); -} - -export async function fetchIssueComments(params: FetchParams) { - const { octokit, payload } = params.context; - const { issueNum, owner, repo } = params; - - const issue = await fetchIssue(params); - - let comments; - if (issue.pull_request) { - /** - * With every review comment with a tagged code line we have `diff_hunk` which is great context - * but could easily max our tokens. - */ - comments = await octokit.paginate(octokit.pulls.listReviewComments, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - pull_number: issueNum || payload.issue.number, - }); - } else { - comments = await octokit.paginate(octokit.issues.listComments, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }); - } - - return { - issue, - comments, - }; -} From 230abe055517c53b25d880d4f22c2afb3fa3cde8 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 23 Jul 2024 23:51:49 +0100 Subject: [PATCH 34/72] chore: add another bot comment filter --- src/types/github.ts | 2 +- src/utils/issue-fetching.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/types/github.ts b/src/types/github.ts index 6448272..313f939 100644 --- a/src/types/github.ts +++ b/src/types/github.ts @@ -16,6 +16,6 @@ export type LinkedIssues = { repo: string; owner: string; url: string; - comments?: IssueComments | ReviewComments; + comments?: IssueComments | ReviewComments | null | undefined; body?: string; }; diff --git a/src/utils/issue-fetching.ts b/src/utils/issue-fetching.ts index c7530ea..8e1efc9 100644 --- a/src/utils/issue-fetching.ts +++ b/src/utils/issue-fetching.ts @@ -1,6 +1,6 @@ import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; -import { FetchParams, Issue, LinkedIssues } from "../types/github"; +import { FetchParams, Issue, IssueComments, LinkedIssues, ReviewComments } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; import { dedupeStreamlinedComments, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { handleIssue, handleSpec, handleSpecAndBodyKeys } from "./issue-handling"; @@ -76,7 +76,7 @@ export async function fetchIssueComments(params: FetchParams) { return { issue, - comments, + comments: comments.filter((comment) => comment.user?.type !== "Bot") as IssueComments | ReviewComments, }; } From f9a8aa4a5b7724b92ec04297937d52208dd360bb Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:31:27 +0100 Subject: [PATCH 35/72] chore(deps): types/jest for namespace --- package.json | 1 + yarn.lock | 12 ++++++++++-- 2 files changed, 11 
insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 75753b8..e4f0539 100644 --- a/package.json +++ b/package.json @@ -45,6 +45,7 @@ "@eslint/js": "9.5.0", "@jest/globals": "29.7.0", "@mswjs/data": "0.16.1", + "@types/jest": "^29.5.12", "@types/node": "20.14.5", "cspell": "8.9.0", "eslint": "9.5.0", diff --git a/yarn.lock b/yarn.lock index cf586c6..9e1c154 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1872,6 +1872,14 @@ dependencies: "@types/istanbul-lib-report" "*" +"@types/jest@^29.5.12": + version "29.5.12" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.12.tgz#7f7dc6eb4cf246d2474ed78744b05d06ce025544" + integrity sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + "@types/lodash@^4.14.172": version "4.17.4" resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.4.tgz#0303b64958ee070059e3a7184048a55159fe20b7" @@ -3408,7 +3416,7 @@ exit@^0.1.2: resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== -expect@^29.7.0: +expect@^29.0.0, expect@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== @@ -5463,7 +5471,7 @@ prettier@3.3.2: resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.3.2.tgz#03ff86dc7c835f2d2559ee76876a3914cec4a90a" integrity sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA== -pretty-format@^29.7.0: +pretty-format@^29.0.0, pretty-format@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== From 1fd941405373608df69b233e27f95f3f8f6785e8 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:32:40 +0100 Subject: [PATCH 36/72] chore: eslint style and optimizing handlers --- tests/__mocks__/handlers.ts | 77 +++++++++++++++++-------------------- tests/main.test.ts | 8 ++-- 2 files changed, 39 insertions(+), 46 deletions(-) diff --git a/tests/__mocks__/handlers.ts b/tests/__mocks__/handlers.ts index 3d4536e..20503d9 100644 --- a/tests/__mocks__/handlers.ts +++ b/tests/__mocks__/handlers.ts @@ -1,5 +1,3 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -/* eslint-disable sonarjs/no-duplicate-string */ import { http, HttpResponse } from "msw"; import { db } from "./db"; import issueTemplate from "./issue-template"; @@ -27,11 +25,11 @@ export const handlers = [ }); }), // GET https://api.github.com/repos/ubiquity/test-repo/issues/1 - http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number", ({ params: { owner, repo, issue_number } }) => { - return HttpResponse.json( - db.issue.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(issue_number) } } }) - ); - }), + http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number", ({ params: { owner, repo, issue_number: issueNumber } }) => + HttpResponse.json( + db.issue.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo 
as string }, number: { equals: Number(issueNumber) } } }) + ) + ), // get repo http.get("https://api.github.com/repos/:owner/:repo", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { @@ -42,9 +40,9 @@ export const handlers = [ return HttpResponse.json(item); }), // get issue - http.get("https://api.github.com/repos/:owner/:repo/issues", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { - return HttpResponse.json(db.issue.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })); - }), + http.get("https://api.github.com/repos/:owner/:repo/issues", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => + HttpResponse.json(db.issue.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })) + ), // create issue http.post("https://api.github.com/repos/:owner/:repo/issues", () => { const id = db.issue.count() + 1; @@ -53,41 +51,38 @@ export const handlers = [ return HttpResponse.json(newItem); }), // get repo issues - http.get("https://api.github.com/orgs/:org/repos", ({ params: { org } }: { params: { org: string } }) => { - return HttpResponse.json(db.repo.findMany({ where: { owner: { login: { equals: org } } } })); - }), + http.get("https://api.github.com/orgs/:org/repos", ({ params: { org } }: { params: { org: string } }) => + HttpResponse.json(db.repo.findMany({ where: { owner: { login: { equals: org } } } })) + ), // add comment to issue - http.post("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number } }) => { - return HttpResponse.json({ owner, repo, issue_number }); - }), + http.post("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number: issueNumber } }) => + HttpResponse.json({ owner, repo, issueNumber }) + ), // list pull requests - http.get("https://api.github.com/repos/:owner/:repo/pulls", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => { - return HttpResponse.json(db.pull.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })); - }), + http.get("https://api.github.com/repos/:owner/:repo/pulls", ({ params: { owner, repo } }: { params: { owner: string; repo: string } }) => + HttpResponse.json(db.pull.findMany({ where: { owner: { equals: owner }, repo: { equals: repo } } })) + ), // update a pull request - http.patch("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number } }) => { - return HttpResponse.json({ owner, repo, pull_number }); - }), - // issues list for repo - http.get("https://api.github.com/repos/:owner/:repo/issues", ({ params: { owner, repo } }) => { - return HttpResponse.json(db.issue.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string } } })); - }), + http.patch("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number: pullNumber } }) => + HttpResponse.json({ owner, repo, pull_number: pullNumber }) + ), + // list issue comments - http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, issue_number } }) => { - return HttpResponse.json( - db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(issue_number) } } }) - ); - }), + http.get("https://api.github.com/repos/:owner/:repo/issues/:issue_number/comments", ({ params: { owner, repo, 
issue_number: issueNumber } }) => + HttpResponse.json( + db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(issueNumber) } } }) + ) + ), //list review comments - http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/comments", ({ params: { owner, repo, pull_number } }) => { - return HttpResponse.json( - db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(pull_number) } } }) - ); - }), + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number/comments", ({ params: { owner, repo, pull_number: pullNumber } }) => + HttpResponse.json( + db.comments.findMany({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, issue_number: { equals: Number(pullNumber) } } }) + ) + ), // octokit.pulls.get - http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number } }) => { - return HttpResponse.json( - db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pull_number) } } }) - ); - }), + http.get("https://api.github.com/repos/:owner/:repo/pulls/:pull_number", ({ params: { owner, repo, pull_number: pullNumber } }) => + HttpResponse.json( + db.pull.findFirst({ where: { owner: { equals: owner as string }, repo: { equals: repo as string }, number: { equals: Number(pullNumber) } } }) + ) + ), ]; diff --git a/tests/main.test.ts b/tests/main.test.ts index 8b6333b..879f4ce 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -1,7 +1,7 @@ import { db } from "./__mocks__/db"; import { server } from "./__mocks__/node"; import usersGet from "./__mocks__/users-get.json"; -import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it, jest } from "@jest/globals"; +import { expect, describe, beforeAll, beforeEach, afterAll, afterEach, it } from "@jest/globals"; import { Logs } from "@ubiquity-dao/ubiquibot-logger"; import { Context, SupportedEventsU } from "../src/types"; import { drop } from "@mswjs/data"; @@ -30,10 +30,8 @@ type Comment = { pull_request_url?: string; }; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -const octokit = jest.requireActual("@octokit/rest") as any; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -jest.requireActual("openai") as any; +const octokit = jest.requireActual("@octokit/rest"); +jest.requireActual("openai"); beforeAll(() => { server.listen(); From 1160999540503093c3e989d464c4e3301e78e03f Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:33:09 +0100 Subject: [PATCH 37/72] chore: unused ternary --- src/utils/format-chat-history.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index 38a7ad1..4f45706 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -97,11 +97,7 @@ async function createContextBlockSection( return block.join(""); } - const diffBlock = [ - createHeader("Linked Pull Request Code Diff", repoString), - isPull ? 
isPull : "No diff available", - createFooter("Linked Pull Request Code Diff"), - ]; + const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), isPull, createFooter("Linked Pull Request Code Diff")]; return block.concat(diffBlock).join(""); } From 4771e02aa3677940605818f96d2f2eb34fc70b28 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:41:19 +0100 Subject: [PATCH 38/72] chore: system msg format fix --- src/utils/format-chat-history.ts | 6 +++--- tests/main.test.ts | 12 +++++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index 4f45706..a3d1d4e 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -132,9 +132,9 @@ export function createChatHistory(formattedChat: string) { const systemMessage: ChatCompletionMessageParam = { role: "system", content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. - Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. - The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. - Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`.trim(), +Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. +The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. +Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.` }; const userMessage: ChatCompletionMessageParam = { diff --git a/tests/main.test.ts b/tests/main.test.ts index 879f4ce..20f1ea4 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -14,6 +14,11 @@ const TEST_QUESTION = "What is pi?"; const TEST_SLASH_COMMAND = "/gpt what is pi?"; const LOG_CALLER = "_Logs."; +const systemMsg = `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. +Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. +The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. +Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.` + type Comment = { id: number; user: { @@ -132,14 +137,15 @@ This is a demo spec for a demo task just perfect for testing. 
=== End Current Issue #1 Conversation ===\n `; + + expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: what is pi?"); expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { caller: LOG_CALLER, chat: [ { role: "system", - content: - "You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests.\n Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements.\n The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked.\n Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.", + content: systemMsg, }, { role: "user", @@ -211,7 +217,7 @@ Just another issue chat: [ { role: "system", - content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests.\n Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements.\n The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked.\n Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, + content: systemMsg, }, { role: "user", From d9d440aad4126a2cecc4c8dbc829e641a0ddaef8 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 24 Sep 2024 16:19:02 +0100 Subject: [PATCH 39/72] feat: ubiquity_os_app_slug config item --- src/plugin.ts | 5 ++--- src/types/plugin-inputs.ts | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/plugin.ts b/src/plugin.ts index 486a596..645e866 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -25,14 +25,13 @@ export async function setupAndRun(inputs: PluginInputs) { export async function plugin(context: Context) { const { logger, - config: { isEnabled }, + config: { isEnabled, ubiquity_os_app_slug }, } = context; if (isSupportedEvent(context.eventName)) { const comment = context.payload.comment.body; - if (!comment.startsWith("/gpt")) { - logger.info("Comment does not start with /gpt. 
Skipping."); + if (!comment.includes(`@${ubiquity_os_app_slug}`)) { return; } diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 03a608e..f37bf8b 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -22,6 +22,7 @@ export interface PluginInputs Date: Tue, 24 Sep 2024 16:25:55 +0100 Subject: [PATCH 40/72] chore: move api key into env, remove isEnabled, add manifest --- manifest.json | 7 +++++++ src/plugin.ts | 18 +++++++----------- src/types/context.ts | 2 ++ src/types/env.ts | 19 +++++++++++++++++++ src/types/plugin-inputs.ts | 2 -- src/worker.ts | 30 +++++++++++++++++++++++++----- 6 files changed, 60 insertions(+), 18 deletions(-) create mode 100644 manifest.json create mode 100644 src/types/env.ts diff --git a/manifest.json b/manifest.json new file mode 100644 index 0000000..f947209 --- /dev/null +++ b/manifest.json @@ -0,0 +1,7 @@ +{ + "name": "gpt", + "description": "gpt", + "ubiquity:listeners": [ + "issue_comment.created" + ] +} \ No newline at end of file diff --git a/src/plugin.ts b/src/plugin.ts index 645e866..1f54f0a 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -4,8 +4,9 @@ import { Context } from "./types"; import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Env } from "./types/env"; -export async function setupAndRun(inputs: PluginInputs) { +export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); const context: Context = { @@ -13,19 +14,20 @@ export async function setupAndRun(inputs: PluginInputs) { payload: inputs.eventPayload, config: inputs.settings, octokit, - logger: new Logs("info"), + env, + logger: new Logs("debug"), }; - return await plugin(context); + return runPlugin(context); } /** * How a worker executes the plugin. */ -export async function plugin(context: Context) { +export async function runPlugin(context: Context) { const { logger, - config: { isEnabled, ubiquity_os_app_slug }, + config: { ubiquity_os_app_slug }, } = context; if (isSupportedEvent(context.eventName)) { @@ -40,12 +42,6 @@ export async function plugin(context: Context) { return; } - if (!isEnabled) { - const log = logger.info("The /gpt command is disabled. Enable it in the plugin settings."); - await addCommentToIssue(context, log?.logMessage.diff as string); - return; - } - const question = comment.slice(4).trim(); logger.info(`Asking question: ${question}`); diff --git a/src/types/context.ts b/src/types/context.ts index 5fd5504..180ecc0 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -2,6 +2,7 @@ import { Octokit } from "@octokit/rest"; import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as WebhookEventName } from "@octokit/webhooks"; import { PluginSettings } from "./plugin-inputs"; import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { Env } from "./env"; export type SupportedEventsU = "issue_comment.created"; @@ -15,4 +16,5 @@ export interface Context; config: PluginSettings; logger: Logs; + env: Env } diff --git a/src/types/env.ts b/src/types/env.ts new file mode 100644 index 0000000..4cbec19 --- /dev/null +++ b/src/types/env.ts @@ -0,0 +1,19 @@ +import { Type as T } from "@sinclair/typebox"; +import { StaticDecode } from "@sinclair/typebox"; +import "dotenv/config"; +import { StandardValidator } from "typebox-validators"; + +/** + * Define sensitive environment variables here. 
+ * + * These are fed into the worker/workflow as `env` and are + * taken from either `dev.vars` or repository secrets. + * They are used with `process.env` but are type-safe. + */ +export const envSchema = T.Object({ + openAi_apiKey: T.String(), +}); + +export const envValidator = new StandardValidator(envSchema); + +export type Env = StaticDecode; \ No newline at end of file diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index f37bf8b..b04d5f1 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -20,8 +20,6 @@ export interface PluginInputs { + async fetch(request: Request, env: Env): Promise { try { + if (request.method === "GET") { + const url = new URL(request.url); + if (url.pathname === "/manifest.json") { + return new Response(JSON.stringify(manifest), { + headers: { "content-type": "application/json" }, + }); + } + } if (request.method !== "POST") { return new Response(JSON.stringify({ error: `Only POST requests are supported.` }), { status: 405, @@ -18,10 +28,9 @@ export default { headers: { "content-type": "application/json" }, }); } - const webhookPayload = await request.json(); const settings = Value.Decode(pluginSettingsSchema, Value.Default(pluginSettingsSchema, webhookPayload.settings)); - + const decodedEnv = Value.Decode(envValidator.schema, Value.Default(envValidator.schema, env)); if (!pluginSettingsValidator.test(settings)) { const errors: string[] = []; for (const error of pluginSettingsValidator.errors(settings)) { @@ -33,9 +42,20 @@ export default { headers: { "content-type": "application/json" }, }); } + if (!envValidator.test(decodedEnv)) { + const errors: string[] = []; + for (const error of envValidator.errors(decodedEnv)) { + console.error(error); + errors.push(`${error.path}: ${error.message}`); + } + return new Response(JSON.stringify({ error: `Error: "Invalid environment provided. 
${errors.join("; ")}"` }), { + status: 400, + headers: { "content-type": "application/json" }, + }); + } webhookPayload.settings = settings; - await setupAndRun(webhookPayload); + await plugin(webhookPayload, decodedEnv); return new Response(JSON.stringify("OK"), { status: 200, headers: { "content-type": "application/json" } }); } catch (error) { return handleUncaughtError(error); From de5a65cbf60ee08af1532ddb4c37ddaa3f1aee97 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Tue, 24 Sep 2024 16:34:19 +0100 Subject: [PATCH 41/72] chore: remove workflow items, correct test imports, update logger --- .github/workflows/compute.yml | 38 ---------------------- package.json | 4 +-- src/handlers/ask-gpt.ts | 8 ++--- src/main.ts | 54 -------------------------------- src/plugin.ts | 7 ++--- src/utils/format-chat-history.ts | 3 +- tests/main.test.ts | 18 +++++------ yarn.lock | 8 ++--- 8 files changed, 23 insertions(+), 117 deletions(-) delete mode 100644 .github/workflows/compute.yml delete mode 100644 src/main.ts diff --git a/.github/workflows/compute.yml b/.github/workflows/compute.yml deleted file mode 100644 index 09e35d5..0000000 --- a/.github/workflows/compute.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: "command-gpt" - -on: - workflow_dispatch: - inputs: - stateId: - description: "State Id" - eventName: - description: "Event Name" - eventPayload: - description: "Event Payload" - settings: - description: "Settings" - authToken: - description: "Auth Token" - ref: - description: "Ref" - -jobs: - compute: - name: "command-gpt" - runs-on: ubuntu-latest - permissions: write-all - - steps: - - uses: actions/checkout@v4 - - - name: setup node - uses: actions/setup-node@v4 - with: - node-version: "20.10.0" - - - name: install dependencies - run: yarn - - - name: execute directive - run: npx tsx ./src/main.ts - id: command-gpt diff --git a/package.json b/package.json index e4f0539..157cf7a 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", - "@ubiquity-dao/ubiquibot-logger": "^1.3.0", + "@ubiquity-dao/ubiquibot-logger": "^1.3.1", "openai": "^4.52.7", "typebox-validators": "0.3.5" }, @@ -81,5 +81,5 @@ "@commitlint/config-conventional" ] }, - "packageManager": "yarn@1.22.22+sha1.ac34549e6aa8e7ead463a7407e1c7390f61a6610" + "packageManager": "yarn@1.22.22" } diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 94a22d1..005c692 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -8,7 +8,7 @@ import { recursivelyFetchLinkedIssues } from "../utils/issue-fetching"; export async function askQuestion(context: Context, question: string) { if (!question) { const log = context.logger.error(`No question provided`); - await addCommentToIssue(context, log?.logMessage.diff as string); + await addCommentToIssue(context, log?.logMessage.diff); return; } @@ -21,12 +21,12 @@ export async function askQuestion(context: Context, question: string) { export async function askGpt(context: Context, formattedChat: string) { const { logger, - config: { openAi_apiKey }, + env: { openAi_apiKey }, } = context; if (!openAi_apiKey) { const log = logger.error(`No OpenAI API Key detected!`); - await addCommentToIssue(context, log?.logMessage.diff as string); // TOO confirm correct style here + await addCommentToIssue(context, log?.logMessage.diff); return; } @@ -44,7 +44,7 @@ export async function askGpt(context: Context, formattedChat: string) { if (!res.choices) { 
const log = logger.error(`No response from OpenAI`); - await addCommentToIssue(context, log?.logMessage.diff as string); + await addCommentToIssue(context, log?.logMessage.diff); return; } diff --git a/src/main.ts b/src/main.ts deleted file mode 100644 index 78b7e44..0000000 --- a/src/main.ts +++ /dev/null @@ -1,54 +0,0 @@ -import * as core from "@actions/core"; -import * as github from "@actions/github"; -import { Octokit } from "@octokit/rest"; -import { Value } from "@sinclair/typebox/value"; -import { pluginSettingsSchema, PluginInputs, pluginSettingsValidator } from "./types"; -import { setupAndRun } from "./plugin"; - -/** - * How a GitHub action executes the plugin. - */ -export async function run() { - const payload = github.context.payload.inputs; - - const settings = Value.Decode(pluginSettingsSchema, Value.Default(pluginSettingsSchema, JSON.parse(payload.settings))); - - if (!pluginSettingsValidator.test(settings)) { - throw new Error("Invalid settings provided"); - } - - const inputs: PluginInputs = { - stateId: payload.stateId, - eventName: payload.eventName, - eventPayload: JSON.parse(payload.eventPayload), - settings, - authToken: payload.authToken, - ref: payload.ref, - }; - - await setupAndRun(inputs); - - return returnDataToKernel(inputs.authToken, inputs.stateId, {}); -} - -async function returnDataToKernel(repoToken: string, stateId: string, output: object) { - const octokit = new Octokit({ auth: repoToken }); - await octokit.repos.createDispatchEvent({ - owner: github.context.repo.owner, - repo: github.context.repo.repo, - event_type: "return_data_to_ubiquibot_kernel", - client_payload: { - state_id: stateId, - output: JSON.stringify(output), - }, - }); -} - -run() - .then((result) => { - core.setOutput("result", result); - }) - .catch((error) => { - console.error(error); - core.setFailed(error); - }); diff --git a/src/plugin.ts b/src/plugin.ts index 1f54f0a..fb38285 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -21,9 +21,6 @@ export async function plugin(inputs: PluginInputs, env: Env) { return runPlugin(context); } -/** - * How a worker executes the plugin. - */ export async function runPlugin(context: Context) { const { logger, @@ -33,7 +30,7 @@ export async function runPlugin(context: Context) { if (isSupportedEvent(context.eventName)) { const comment = context.payload.comment.body; - if (!comment.includes(`@${ubiquity_os_app_slug}`)) { + if (!comment.startsWith(`@${ubiquity_os_app_slug} `)) { return; } @@ -42,7 +39,7 @@ export async function runPlugin(context: Context) { return; } - const question = comment.slice(4).trim(); + const question = comment.replace(`@${ubiquity_os_app_slug}`, "").trim(); logger.info(`Asking question: ${question}`); const response = await askQuestion(context, question); diff --git a/src/utils/format-chat-history.ts b/src/utils/format-chat-history.ts index a3d1d4e..fc6c0c0 100644 --- a/src/utils/format-chat-history.ts +++ b/src/utils/format-chat-history.ts @@ -9,9 +9,10 @@ export async function formatChatHistory(context: Context, streamlined: Record { const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - await plugin(ctx); + await runPlugin(ctx); expect(infoSpy).toHaveBeenCalledWith("The /gpt command is disabled. 
Enable it in the plugin settings."); }); @@ -81,7 +81,7 @@ describe("Ask plugin tests", () => { createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); if (!ctx.payload.comment.user) return; ctx.payload.comment.user.type = "Bot"; - await plugin(ctx); + await runPlugin(ctx); expect(infoSpy).toHaveBeenCalledWith("Comment is from a bot. Skipping."); }); @@ -91,7 +91,7 @@ describe("Ask plugin tests", () => { const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - await plugin(ctx); + await runPlugin(ctx); expect(infoSpy).toHaveBeenCalledWith("Comment does not start with /gpt. Skipping."); }); @@ -101,7 +101,7 @@ describe("Ask plugin tests", () => { const errorSpy = jest.spyOn(ctx.logger, "error"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - await plugin(ctx); + await runPlugin(ctx); expect(errorSpy).toHaveBeenCalledWith("No question provided"); }); @@ -111,8 +111,8 @@ describe("Ask plugin tests", () => { const errorSpy = jest.spyOn(ctx.logger, "error"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - ctx.config.openAi_apiKey = ""; - await plugin(ctx); + ctx.env.openAi_apiKey = ""; + await runPlugin(ctx); expect(errorSpy).toHaveBeenNthCalledWith(1, "No OpenAI API Key detected!"); expect(errorSpy).toHaveBeenNthCalledWith(2, "No response from OpenAI"); @@ -122,7 +122,7 @@ describe("Ask plugin tests", () => { const ctx = createContext(TEST_SLASH_COMMAND); const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - await plugin(ctx); + await runPlugin(ctx); expect(infoSpy).toHaveBeenCalledTimes(3); @@ -174,7 +174,7 @@ This is a demo spec for a demo task just perfect for testing. 
transformCommentTemplate(4, 3, "Just a comment", "ubiquity", "test-repo", true), ]); - await plugin(ctx); + await runPlugin(ctx); expect(infoSpy).toHaveBeenCalledTimes(3); diff --git a/yarn.lock b/yarn.lock index 9e1c154..ffff959 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2051,10 +2051,10 @@ "@typescript-eslint/types" "7.13.1" eslint-visitor-keys "^3.4.3" -"@ubiquity-dao/ubiquibot-logger@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@ubiquity-dao/ubiquibot-logger/-/ubiquibot-logger-1.3.0.tgz#b07364658be95b3be3876305c66b2adc906e9590" - integrity sha512-ifkd7fB2OMTSt3OL9L14bCIvCMXV+IHFdJYU5S8FUzE2U88b4xKxuEAYDFX+DX3wwDEswFAVUwx5aP3QcMIRWA== +"@ubiquity-dao/ubiquibot-logger@^1.3.1": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@ubiquity-dao/ubiquibot-logger/-/ubiquibot-logger-1.3.1.tgz#c3f45d70014dcc2551442c28101046e1c8ea6886" + integrity sha512-kDLnVP87Y3yZV6NnqIEDAOz+92IW0nIcccML2lUn93uZ5ada78vfdTPtwPJo8tkXl1Z9qMKAqqHkwBMp1Ksnag== JSONStream@^1.3.5: version "1.3.5" From b54730034b83aeb02f6430c3fa88b35469e52458 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 09:11:53 +0100 Subject: [PATCH 42/72] chore: worker workflows, dotenv, utils > helpers --- .dev.vars.example | 2 +- .env.example | 2 +- .github/workflows/jest-testing.yml | 2 +- .github/workflows/knip-reporter.yml | 2 +- .github/workflows/worker-delete.yml | 44 ++++++++++++++++ .github/workflows/worker-deploy.yml | 50 +++++++++++++++++++ package.json | 13 ++--- src/handlers/ask-gpt.ts | 16 +++--- src/{utils => helpers}/format-chat-history.ts | 0 src/{utils => helpers}/issue-fetching.ts | 0 src/{utils => helpers}/issue-handling.ts | 0 src/{utils => helpers}/issue.ts | 0 src/types/env.ts | 3 +- wrangler.toml | 2 +- yarn.lock | 21 ++++---- 15 files changed, 124 insertions(+), 33 deletions(-) create mode 100644 .github/workflows/worker-delete.yml create mode 100644 .github/workflows/worker-deploy.yml rename src/{utils => helpers}/format-chat-history.ts (100%) rename src/{utils => helpers}/issue-fetching.ts (100%) rename src/{utils => helpers}/issue-handling.ts (100%) rename src/{utils => helpers}/issue.ts (100%) diff --git a/.dev.vars.example b/.dev.vars.example index e49d79a..c9aa558 100644 --- a/.dev.vars.example +++ b/.dev.vars.example @@ -1 +1 @@ -MY_SECRET="MY_SECRET" +openAi_apiKey="MY_SECRET" diff --git a/.env.example b/.env.example index e49d79a..c9aa558 100644 --- a/.env.example +++ b/.env.example @@ -1 +1 @@ -MY_SECRET="MY_SECRET" +openAi_apiKey="MY_SECRET" diff --git a/.github/workflows/jest-testing.yml b/.github/workflows/jest-testing.yml index 7f8747e..f7cebb7 100644 --- a/.github/workflows/jest-testing.yml +++ b/.github/workflows/jest-testing.yml @@ -24,4 +24,4 @@ jobs: - name: Add Jest Report to Summary if: always() - run: echo "$(cat test-dashboard.md)" >> $GITHUB_STEP_SUMMARY + run: echo "$(cat test-dashboard.md)" >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/.github/workflows/knip-reporter.yml b/.github/workflows/knip-reporter.yml index a396735..b344507 100644 --- a/.github/workflows/knip-reporter.yml +++ b/.github/workflows/knip-reporter.yml @@ -37,4 +37,4 @@ jobs: json_input: true json_input_file_name: knip-results.json pull_request_number: ${{ steps.pr-number.outputs.content }} - token: ${{ secrets.GITHUB_TOKEN }} + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/worker-delete.yml b/.github/workflows/worker-delete.yml new file mode 100644 index 0000000..75b9c91 
--- /dev/null +++ b/.github/workflows/worker-delete.yml @@ -0,0 +1,44 @@ +name: Delete Deployment + +on: + delete: + +jobs: + delete: + runs-on: ubuntu-latest + name: Delete Deployment + steps: + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20.10.0" + + - name: Enable corepack + run: corepack enable + + - uses: actions/checkout@v4 + + - name: Get Deleted Branch Name + id: get_branch + run: | + branch_name=$(echo '${{ github.event.ref }}' | sed 's#refs/heads/##' | sed 's#[^a-zA-Z0-9]#-#g') + echo "branch_name=$branch_name" >> $GITHUB_ENV + - name: Retrieve and Construct Full Worker Name + id: construct_worker_name + run: | + base_name=$(grep '^name = ' wrangler.toml | head -n 1 | sed 's/^name = "\(.*\)"$/\1/') + full_worker_name="${base_name}-${{ env.branch_name }}" + # Make sure that it doesnt exceed 63 characters or it will break RFC 1035 + full_worker_name=$(echo "${full_worker_name}" | cut -c 1-63) + echo "full_worker_name=$full_worker_name" >> $GITHUB_ENV + - name: Delete Deployment with Wrangler + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: delete --name ${{ env.full_worker_name }} + + - name: Output Deletion Result + run: | + echo "### Deployment URL" >> $GITHUB_STEP_SUMMARY + echo 'Deployment `${{ env.full_worker_name }}` has been deleted.' >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/.github/workflows/worker-deploy.yml b/.github/workflows/worker-deploy.yml new file mode 100644 index 0000000..dbf8b5a --- /dev/null +++ b/.github/workflows/worker-deploy.yml @@ -0,0 +1,50 @@ +name: Deploy Worker + +on: + push: + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + name: Deploy + steps: + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: "20.10.0" + + - name: Enable corepack + run: corepack enable + + - uses: actions/checkout@v4 + + - name: Update wrangler.toml Name Field + run: | + branch_name=$(echo '${{ github.event.ref }}' | sed 's#refs/heads/##' | sed 's#[^a-zA-Z0-9]#-#g') + # Extract base name from wrangler.toml + base_name=$(grep '^name = ' wrangler.toml | head -n 1 | sed 's/^name = "\(.*\)"$/\1/') + # Concatenate branch name with base name + new_name="${base_name}-${branch_name}" + # Truncate the new name to 63 characters for RFC 1035 + new_name=$(echo "$new_name" | cut -c 1-63) + # Update the wrangler.toml file + sed -i '0,/^name = .*/{s/^name = .*/name = "'"$new_name"'"/}' wrangler.toml + echo "Updated wrangler.toml name to: $new_name" + - name: Deploy with Wrangler + id: wrangler_deploy + uses: cloudflare/wrangler-action@v3 + with: + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + secrets: | + SUPABASE_URL + SUPABASE_KEY + env: + SUPABASE_URL: ${{ secrets.SUPABASE_URL }} + SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} + + - name: Write Deployment URL to Summary + run: | + echo "### Deployment URL" >> $GITHUB_STEP_SUMMARY + echo "${{ steps.wrangler_deploy.outputs.deployment-url }}" >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/package.json b/package.json index 157cf7a..18f1826 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,8 @@ { - "name": "plugin-template", + "name": "@ubiquity-os/gpt-command", "version": "1.0.0", - "description": "Ubiquibot plugin template repository with TypeScript support.", - "author": "Ubiquity DAO", + "description": "GPT command", + "author": "Ubiquity OS", "license": "MIT", "main": 
"src/worker.ts", "engines": { @@ -16,7 +16,7 @@ "knip": "knip --config .github/knip.ts", "knip-ci": "knip --no-exit-code --reporter json --config .github/knip.ts", "prepare": "husky install", - "test": "jest --coverage", + "test": "jest --setupFiles dotenv/config --coverage", "worker": "wrangler dev --env dev --port 4000" }, "keywords": [ @@ -32,8 +32,9 @@ "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", - "@ubiquity-dao/ubiquibot-logger": "^1.3.1", - "openai": "^4.52.7", + "@ubiquity-dao/ubiquibot-logger": "^1.3.0", + "dotenv": "^16.4.5", + "openai": "^4.63.0", "typebox-validators": "0.3.5" }, "devDependencies": { diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 005c692..9772350 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,14 +1,13 @@ import OpenAI from "openai"; import { Context } from "../types"; -import { createChatHistory, formatChatHistory } from "../utils/format-chat-history"; +import { createChatHistory, formatChatHistory } from "../helpers/format-chat-history"; import { addCommentToIssue } from "./add-comment"; -import { recursivelyFetchLinkedIssues } from "../utils/issue-fetching"; +import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; export async function askQuestion(context: Context, question: string) { if (!question) { - const log = context.logger.error(`No question provided`); - await addCommentToIssue(context, log?.logMessage.diff); + await addCommentToIssue(context, context.logger.error(`No question provided`).logMessage.diff); return; } @@ -25,8 +24,7 @@ export async function askGpt(context: Context, formattedChat: string) { } = context; if (!openAi_apiKey) { - const log = logger.error(`No OpenAI API Key detected!`); - await addCommentToIssue(context, log?.logMessage.diff); + await addCommentToIssue(context, logger.error(`No OpenAI API Key detected!`).logMessage.diff); return; } @@ -38,13 +36,11 @@ export async function askGpt(context: Context, formattedChat: string) { const res: OpenAI.Chat.Completions.ChatCompletion = await openAi.chat.completions.create({ messages: createChatHistory(formattedChat), - model: "gpt-4o", // "gpt-4o - temperature: 0, + model: "chatgpt-4o-latest", }); if (!res.choices) { - const log = logger.error(`No response from OpenAI`); - await addCommentToIssue(context, log?.logMessage.diff); + await addCommentToIssue(context, logger.error(`No response from OpenAI`).logMessage.diff); return; } diff --git a/src/utils/format-chat-history.ts b/src/helpers/format-chat-history.ts similarity index 100% rename from src/utils/format-chat-history.ts rename to src/helpers/format-chat-history.ts diff --git a/src/utils/issue-fetching.ts b/src/helpers/issue-fetching.ts similarity index 100% rename from src/utils/issue-fetching.ts rename to src/helpers/issue-fetching.ts diff --git a/src/utils/issue-handling.ts b/src/helpers/issue-handling.ts similarity index 100% rename from src/utils/issue-handling.ts rename to src/helpers/issue-handling.ts diff --git a/src/utils/issue.ts b/src/helpers/issue.ts similarity index 100% rename from src/utils/issue.ts rename to src/helpers/issue.ts diff --git a/src/types/env.ts b/src/types/env.ts index 4cbec19..98e88b3 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -1,7 +1,8 @@ import { Type as T } from "@sinclair/typebox"; import { StaticDecode } from "@sinclair/typebox"; -import "dotenv/config"; import { StandardValidator } from "typebox-validators"; +import dotenv from "dotenv"; +dotenv.config(); /** * Define 
sensitive environment variables here. diff --git a/wrangler.toml b/wrangler.toml index 5a0953a..3a28184 100644 --- a/wrangler.toml +++ b/wrangler.toml @@ -1,4 +1,4 @@ -name = "your-plugin-name" +name = "gpt-command" main = "src/worker.ts" compatibility_date = "2024-05-23" node_compat = true diff --git a/yarn.lock b/yarn.lock index ffff959..787d96d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2051,7 +2051,7 @@ "@typescript-eslint/types" "7.13.1" eslint-visitor-keys "^3.4.3" -"@ubiquity-dao/ubiquibot-logger@^1.3.1": +"@ubiquity-dao/ubiquibot-logger@^1.3.0": version "1.3.1" resolved "https://registry.yarnpkg.com/@ubiquity-dao/ubiquibot-logger/-/ubiquibot-logger-1.3.1.tgz#c3f45d70014dcc2551442c28101046e1c8ea6886" integrity sha512-kDLnVP87Y3yZV6NnqIEDAOz+92IW0nIcccML2lUn93uZ5ada78vfdTPtwPJo8tkXl1Z9qMKAqqHkwBMp1Ksnag== @@ -3022,6 +3022,11 @@ dot-prop@^5.1.0: dependencies: is-obj "^2.0.0" +dotenv@^16.4.5: + version "16.4.5" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" + integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== + easy-table@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/easy-table/-/easy-table-1.2.0.tgz#ba9225d7138fee307bfd4f0b5bc3c04bdc7c54eb" @@ -5213,10 +5218,10 @@ onetime@^6.0.0: dependencies: mimic-fn "^4.0.0" -openai@^4.52.7: - version "4.52.7" - resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.7.tgz#e32b000142287a9e8eda8512ba28df33d11ec1f1" - integrity sha512-dgxA6UZHary6NXUHEDj5TWt8ogv0+ibH+b4pT5RrWMjiRZVylNwLcw/2ubDrX5n0oUmHX/ZgudMJeemxzOvz7A== +openai@^4.63.0: + version "4.63.0" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.63.0.tgz#cabe7223788157c96c818317cc361386807157f7" + integrity sha512-Y9V4KODbmrOpqiOmCDVnPfMxMqKLOx8Hwcdn/r8mePq4yv7FSXGnxCs8/jZKO7zCB/IVPWihpJXwJNAIOEiZ2g== dependencies: "@types/node" "^18.11.18" "@types/node-fetch" "^2.6.4" @@ -5225,7 +5230,6 @@ openai@^4.52.7: form-data-encoder "1.7.2" formdata-node "^4.3.2" node-fetch "^2.6.7" - web-streams-polyfill "^3.2.1" optionator@^0.9.3: version "0.9.4" @@ -6418,11 +6422,6 @@ web-streams-polyfill@4.0.0-beta.3: resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== -web-streams-polyfill@^3.2.1: - version "3.3.3" - resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b" - integrity sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw== - webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" From 96f2a8b92b2ddcc39925e0b18b64b7983b1f8679 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 09:27:27 +0100 Subject: [PATCH 43/72] chore: throw LogReturn and bubble up error comment --- src/handlers/ask-gpt.ts | 13 +------ src/handlers/comments.ts | 2 +- src/helpers/format-chat-history.ts | 2 +- src/plugin.ts | 60 ++++++++++++++++-------------- 4 files changed, 37 insertions(+), 40 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 9772350..56c6f17 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,14 +1,11 @@ import OpenAI from "openai"; 
import { Context } from "../types"; - import { createChatHistory, formatChatHistory } from "../helpers/format-chat-history"; -import { addCommentToIssue } from "./add-comment"; import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; export async function askQuestion(context: Context, question: string) { if (!question) { - await addCommentToIssue(context, context.logger.error(`No question provided`).logMessage.diff); - return; + throw context.logger.error(`No question provided`); } const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); @@ -24,8 +21,7 @@ export async function askGpt(context: Context, formattedChat: string) { } = context; if (!openAi_apiKey) { - await addCommentToIssue(context, logger.error(`No OpenAI API Key detected!`).logMessage.diff); - return; + throw logger.error(`No OpenAI API Key detected!`); } const openAi = new OpenAI({ apiKey: openAi_apiKey }); @@ -39,11 +35,6 @@ export async function askGpt(context: Context, formattedChat: string) { model: "chatgpt-4o-latest", }); - if (!res.choices) { - await addCommentToIssue(context, logger.error(`No response from OpenAI`).logMessage.diff); - return; - } - const answer = res.choices[0].message.content; const tokenUsage = { diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index e5fbad0..37ef255 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -29,7 +29,7 @@ export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { } export function createKey(issueUrl: string, issue?: number) { - if (!issueUrl) throw new Error("issueUrl is required"); + if (!issueUrl) throw new Error("issue.url is required to create a key"); const [, , , , issueOrg, issueRepo, , issueNumber] = issueUrl.split("/"); return `${issueOrg}/${issueRepo}/${issueNumber || issue}`; diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index fc6c0c0..0b5c20a 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -65,7 +65,7 @@ async function createContextBlockSection( const isPull = await fetchPullRequestDiff(context, org, repo, issueNumber); if (!issueNumber || isNaN(issueNumber)) { - throw new Error("Issue number is not valid"); + throw context.logger.error("Issue number is not valid"); } const specHeader = getCorrectHeaderString(isPull, issueNumber, isCurrentIssue, false); diff --git a/src/plugin.ts b/src/plugin.ts index fb38285..c40c44b 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -3,7 +3,7 @@ import { PluginInputs, SupportedEventsU } from "./types"; import { Context } from "./types"; import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; -import { Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { LogReturn, Logs } from "@ubiquity-dao/ubiquibot-logger"; import { Env } from "./types/env"; export async function plugin(inputs: PluginInputs, env: Env) { @@ -26,40 +26,46 @@ export async function runPlugin(context: Context) { logger, config: { ubiquity_os_app_slug }, } = context; + const comment = context.payload.comment.body; - if (isSupportedEvent(context.eventName)) { - const comment = context.payload.comment.body; - - if (!comment.startsWith(`@${ubiquity_os_app_slug} `)) { - return; - } + if (!comment.startsWith(`@${ubiquity_os_app_slug} `)) { + return; + } - if (context.payload.comment.user?.type === "Bot") { - logger.info("Comment is from a bot. 
Skipping."); - return; - } + if (context.payload.comment.user?.type === "Bot") { + logger.info("Comment is from a bot. Skipping."); + return; + } - const question = comment.replace(`@${ubiquity_os_app_slug}`, "").trim(); + const question = comment.replace(`@${ubiquity_os_app_slug}`, "").trim(); + logger.info(`Asking question: ${question}`); + let commentBody = ""; - logger.info(`Asking question: ${question}`); + try { const response = await askQuestion(context, question); + const { answer, tokenUsage } = response; - if (response) { - const { answer, tokenUsage } = response; - if (!answer) { - logger.error(`No answer from OpenAI`); - return; - } - logger.info(`Answer: ${answer}`, { tokenUsage }); - await addCommentToIssue(context, answer); + if (!answer) { + throw logger.error(`No answer from OpenAI`); + } + + logger.info(`Answer: ${answer}`, { tokenUsage }); + + commentBody = answer; + } catch (err) { + let errorMessage; + if (err instanceof LogReturn) { + errorMessage = err; + } else if (err instanceof Error) { + errorMessage = context.logger.error(err.message, { error: err }); } else { - logger.error(`No response from OpenAI`); + errorMessage = context.logger.error("An error occurred", { err }); } - } else { - logger.error(`Unsupported event: ${context.eventName}`); + commentBody = `${errorMessage?.logMessage.diff}\n`; } -} -function isSupportedEvent(eventName: string): eventName is SupportedEventsU { - return eventName === "issue_comment.created"; + await addCommentToIssue(context, commentBody); +} +function sanitizeMetadata(obj: LogReturn["metadata"]): string { + return JSON.stringify(obj, null, 2).replace(//g, ">").replace(/--/g, "--"); } From 68e793846f29ab35d4166c19e4c1eb5c2c4e4efe Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 09:30:57 +0100 Subject: [PATCH 44/72] chore: use regex for slug capture --- src/plugin.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/plugin.ts b/src/plugin.ts index c40c44b..2a0b34e 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,5 +1,5 @@ import { Octokit } from "@octokit/rest"; -import { PluginInputs, SupportedEventsU } from "./types"; +import { PluginInputs } from "./types"; import { Context } from "./types"; import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; @@ -28,7 +28,9 @@ export async function runPlugin(context: Context) { } = context; const comment = context.payload.comment.body; - if (!comment.startsWith(`@${ubiquity_os_app_slug} `)) { + const slugRegex = new RegExp(`@${ubiquity_os_app_slug} `, "gi"); + + if (!comment.match(slugRegex)) { return; } @@ -37,12 +39,11 @@ export async function runPlugin(context: Context) { return; } - const question = comment.replace(`@${ubiquity_os_app_slug}`, "").trim(); - logger.info(`Asking question: ${question}`); + logger.info(`Asking question: ${comment}`); let commentBody = ""; try { - const response = await askQuestion(context, question); + const response = await askQuestion(context, comment); const { answer, tokenUsage } = response; if (!answer) { From d7ceafae2c5e1b15fccc09ac1698246716e9a7cc Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 09:52:11 +0100 Subject: [PATCH 45/72] chore: refactor env var name and tests --- .dev.vars.example | 2 +- .env.example | 2 +- .github/workflows/worker-deploy.yml | 6 ++--- src/handlers/ask-gpt.ts | 6 ++--- src/plugin.ts | 22 ++++++++++------ src/types/env.ts 
| 2 +- tests/main.test.ts | 41 +++++++++++------------------ 7 files changed, 38 insertions(+), 43 deletions(-) diff --git a/.dev.vars.example b/.dev.vars.example index c9aa558..b9e5cff 100644 --- a/.dev.vars.example +++ b/.dev.vars.example @@ -1 +1 @@ -openAi_apiKey="MY_SECRET" +OPENAI_API_KEY="MY_SECRET" diff --git a/.env.example b/.env.example index c9aa558..b9e5cff 100644 --- a/.env.example +++ b/.env.example @@ -1 +1 @@ -openAi_apiKey="MY_SECRET" +OPENAI_API_KEY="MY_SECRET" diff --git a/.github/workflows/worker-deploy.yml b/.github/workflows/worker-deploy.yml index dbf8b5a..6a54c35 100644 --- a/.github/workflows/worker-deploy.yml +++ b/.github/workflows/worker-deploy.yml @@ -38,11 +38,9 @@ jobs: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} secrets: | - SUPABASE_URL - SUPABASE_KEY + OPENAI_API_KEY env: - SUPABASE_URL: ${{ secrets.SUPABASE_URL }} - SUPABASE_KEY: ${{ secrets.SUPABASE_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - name: Write Deployment URL to Summary run: | diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 56c6f17..f883004 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -17,14 +17,14 @@ export async function askQuestion(context: Context, question: string) { export async function askGpt(context: Context, formattedChat: string) { const { logger, - env: { openAi_apiKey }, + env: { OPENAI_API_KEY }, } = context; - if (!openAi_apiKey) { + if (!OPENAI_API_KEY) { throw logger.error(`No OpenAI API Key detected!`); } - const openAi = new OpenAI({ apiKey: openAi_apiKey }); + const openAi = new OpenAI({ apiKey: OPENAI_API_KEY }); const chat = createChatHistory(formattedChat); diff --git a/src/plugin.ts b/src/plugin.ts index 2a0b34e..932d0d0 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -26,11 +26,12 @@ export async function runPlugin(context: Context) { logger, config: { ubiquity_os_app_slug }, } = context; - const comment = context.payload.comment.body; + const question = context.payload.comment.body; const slugRegex = new RegExp(`@${ubiquity_os_app_slug} `, "gi"); - if (!comment.match(slugRegex)) { + if (!question.match(slugRegex)) { + logger.info("Comment does not mention the app. Skipping."); return; } @@ -39,11 +40,16 @@ export async function runPlugin(context: Context) { return; } - logger.info(`Asking question: ${comment}`); - let commentBody = ""; + if (question.replace(slugRegex, "").trim().length === 0) { + logger.info("Comment is empty. 
Skipping."); + return; + } + + logger.info(`Asking question: ${question}`); + let commentToPost = ""; try { - const response = await askQuestion(context, comment); + const response = await askQuestion(context, question); const { answer, tokenUsage } = response; if (!answer) { @@ -52,7 +58,7 @@ export async function runPlugin(context: Context) { logger.info(`Answer: ${answer}`, { tokenUsage }); - commentBody = answer; + commentToPost = answer; } catch (err) { let errorMessage; if (err instanceof LogReturn) { @@ -62,10 +68,10 @@ export async function runPlugin(context: Context) { } else { errorMessage = context.logger.error("An error occurred", { err }); } - commentBody = `${errorMessage?.logMessage.diff}\n`; + commentToPost = `${errorMessage?.logMessage.diff}\n`; } - await addCommentToIssue(context, commentBody); + await addCommentToIssue(context, commentToPost); } function sanitizeMetadata(obj: LogReturn["metadata"]): string { return JSON.stringify(obj, null, 2).replace(//g, ">").replace(/--/g, "--"); diff --git a/src/types/env.ts b/src/types/env.ts index 98e88b3..46ef60a 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -12,7 +12,7 @@ dotenv.config(); * They are used with `process.env` but are type-safe. */ export const envSchema = T.Object({ - openAi_apiKey: T.String(), + OPENAI_API_KEY: T.String(), }); export const envValidator = new StandardValidator(envSchema); diff --git a/tests/main.test.ts b/tests/main.test.ts index f0382ff..9ca1c83 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -8,10 +8,10 @@ import { drop } from "@mswjs/data"; import issueTemplate from "./__mocks__/issue-template"; import repoTemplate from "./__mocks__/repo-template"; import { askQuestion } from "../src/handlers/ask-gpt"; -import { plugin, runPlugin } from "../src/plugin"; +import { runPlugin } from "../src/plugin"; -const TEST_QUESTION = "What is pi?"; -const TEST_SLASH_COMMAND = "/gpt what is pi?"; +const TEST_QUESTION = "what is pi?"; +const TEST_SLASH_COMMAND = "@UbiquityOS what is pi?"; const LOG_CALLER = "_Logs."; const systemMsg = `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. @@ -64,16 +64,6 @@ describe("Ask plugin tests", () => { expect(res?.answer).toBe("This is a mock answer for the chat"); }); - it("should not ask GPT a question if plugin is disabled", async () => { - const ctx = createContext(TEST_SLASH_COMMAND, false); - const infoSpy = jest.spyOn(ctx.logger, "info"); - - createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - await runPlugin(ctx); - - expect(infoSpy).toHaveBeenCalledWith("The /gpt command is disabled. Enable it in the plugin settings."); - }); - it("should not ask GPT a question if comment is from a bot", async () => { const ctx = createContext(TEST_SLASH_COMMAND); const infoSpy = jest.spyOn(ctx.logger, "info"); @@ -93,17 +83,17 @@ describe("Ask plugin tests", () => { createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); await runPlugin(ctx); - expect(infoSpy).toHaveBeenCalledWith("Comment does not start with /gpt. Skipping."); + expect(infoSpy).toHaveBeenCalledWith("Comment does not mention the app. 
Skipping."); }); it("should not ask GPT a question if no question is provided", async () => { - const ctx = createContext("/gpt"); - const errorSpy = jest.spyOn(ctx.logger, "error"); + const ctx = createContext(`@UbiquityOS `); + const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); await runPlugin(ctx); - expect(errorSpy).toHaveBeenCalledWith("No question provided"); + expect(infoSpy).toHaveBeenCalledWith("Comment is empty. Skipping."); }); it("should not ask GPT a question if no OpenAI API key is provided", async () => { @@ -111,11 +101,10 @@ describe("Ask plugin tests", () => { const errorSpy = jest.spyOn(ctx.logger, "error"); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - ctx.env.openAi_apiKey = ""; + ctx.env.OPENAI_API_KEY = ""; await runPlugin(ctx); expect(errorSpy).toHaveBeenNthCalledWith(1, "No OpenAI API Key detected!"); - expect(errorSpy).toHaveBeenNthCalledWith(2, "No response from OpenAI"); }); it("should construct the chat history correctly", async () => { @@ -133,13 +122,13 @@ This is a demo spec for a demo task just perfect for testing. === Current Issue #1 Conversation === ubiquity/test-repo #1 === -1 ubiquity: What is pi? +1 ubiquity: what is pi? === End Current Issue #1 Conversation ===\n `; - expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: what is pi?"); + expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: @UbiquityOS what is pi?"); expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { caller: LOG_CALLER, chat: [ @@ -178,7 +167,7 @@ This is a demo spec for a demo task just perfect for testing. expect(infoSpy).toHaveBeenCalledTimes(3); - expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: what is pi?"); + expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: @UbiquityOS what is pi?"); const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === @@ -188,7 +177,7 @@ This is a demo spec for a demo task just perfect for testing. === Current Issue #1 Conversation === ubiquity/test-repo #1 === 1 ubiquity: More context here #2 -2 ubiquity: What is pi? +2 ubiquity: what is pi? 
=== End Current Issue #1 Conversation === === Linked Issue #2 Specification === ubiquity/test-repo/2 === @@ -318,8 +307,10 @@ function createContext(body = TEST_SLASH_COMMAND, isEnabled = true) { }, logger: new Logs("debug"), config: { - isEnabled, - openAi_apiKey: "test", + ubiquity_os_app_slug: "UbiquityOS", + }, + env: { + OPENAI_API_KEY: "test", }, octokit: new octokit.Octokit(), eventName: "issue_comment.created" as SupportedEventsU, From 3ca87406361aea83a8aa9243265530a1bb82bb6e Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:12:27 +0100 Subject: [PATCH 46/72] chore: format --- .github/workflows/jest-testing.yml | 2 +- .github/workflows/knip-reporter.yml | 2 +- .github/workflows/worker-delete.yml | 2 +- .github/workflows/worker-deploy.yml | 2 +- manifest.json | 10 +- package.json | 3 - src/plugin.ts | 2 +- src/types/context.ts | 2 +- src/types/env.ts | 4 +- src/types/github.ts | 1 + src/types/gpt.ts | 2 +- tests/main.test.ts | 6 +- yarn.lock | 229 +--------------------------- 13 files changed, 19 insertions(+), 248 deletions(-) diff --git a/.github/workflows/jest-testing.yml b/.github/workflows/jest-testing.yml index f7cebb7..7f8747e 100644 --- a/.github/workflows/jest-testing.yml +++ b/.github/workflows/jest-testing.yml @@ -24,4 +24,4 @@ jobs: - name: Add Jest Report to Summary if: always() - run: echo "$(cat test-dashboard.md)" >> $GITHUB_STEP_SUMMARY \ No newline at end of file + run: echo "$(cat test-dashboard.md)" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/knip-reporter.yml b/.github/workflows/knip-reporter.yml index b344507..a396735 100644 --- a/.github/workflows/knip-reporter.yml +++ b/.github/workflows/knip-reporter.yml @@ -37,4 +37,4 @@ jobs: json_input: true json_input_file_name: knip-results.json pull_request_number: ${{ steps.pr-number.outputs.content }} - token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/worker-delete.yml b/.github/workflows/worker-delete.yml index 75b9c91..f715a20 100644 --- a/.github/workflows/worker-delete.yml +++ b/.github/workflows/worker-delete.yml @@ -41,4 +41,4 @@ jobs: - name: Output Deletion Result run: | echo "### Deployment URL" >> $GITHUB_STEP_SUMMARY - echo 'Deployment `${{ env.full_worker_name }}` has been deleted.' >> $GITHUB_STEP_SUMMARY \ No newline at end of file + echo 'Deployment `${{ env.full_worker_name }}` has been deleted.' 
>> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/worker-deploy.yml b/.github/workflows/worker-deploy.yml index 6a54c35..cba478c 100644 --- a/.github/workflows/worker-deploy.yml +++ b/.github/workflows/worker-deploy.yml @@ -45,4 +45,4 @@ jobs: - name: Write Deployment URL to Summary run: | echo "### Deployment URL" >> $GITHUB_STEP_SUMMARY - echo "${{ steps.wrangler_deploy.outputs.deployment-url }}" >> $GITHUB_STEP_SUMMARY \ No newline at end of file + echo "${{ steps.wrangler_deploy.outputs.deployment-url }}" >> $GITHUB_STEP_SUMMARY diff --git a/manifest.json b/manifest.json index f947209..0ac072f 100644 --- a/manifest.json +++ b/manifest.json @@ -1,7 +1,5 @@ { - "name": "gpt", - "description": "gpt", - "ubiquity:listeners": [ - "issue_comment.created" - ] -} \ No newline at end of file + "name": "gpt", + "description": "gpt", + "ubiquity:listeners": ["issue_comment.created"] +} diff --git a/package.json b/package.json index 18f1826..075342d 100644 --- a/package.json +++ b/package.json @@ -27,8 +27,6 @@ "open-source" ], "dependencies": { - "@actions/core": "1.10.1", - "@actions/github": "6.0.0", "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", @@ -63,7 +61,6 @@ "npm-run-all": "4.1.5", "prettier": "3.3.2", "ts-jest": "29.1.5", - "tsx": "4.15.6", "typescript": "5.4.5", "typescript-eslint": "7.13.1", "wrangler": "3.60.3" diff --git a/src/plugin.ts b/src/plugin.ts index 932d0d0..4fb5147 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -64,7 +64,7 @@ export async function runPlugin(context: Context) { if (err instanceof LogReturn) { errorMessage = err; } else if (err instanceof Error) { - errorMessage = context.logger.error(err.message, { error: err }); + errorMessage = context.logger.error(err.message, { error: err, stack: err.stack }); } else { errorMessage = context.logger.error("An error occurred", { err }); } diff --git a/src/types/context.ts b/src/types/context.ts index 180ecc0..d5f7113 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -16,5 +16,5 @@ export interface Context; config: PluginSettings; logger: Logs; - env: Env + env: Env; } diff --git a/src/types/env.ts b/src/types/env.ts index 46ef60a..eef600a 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -12,9 +12,9 @@ dotenv.config(); * They are used with `process.env` but are type-safe. */ export const envSchema = T.Object({ - OPENAI_API_KEY: T.String(), + OPENAI_API_KEY: T.String(), }); export const envValidator = new StandardValidator(envSchema); -export type Env = StaticDecode; \ No newline at end of file +export type Env = StaticDecode; diff --git a/src/types/github.ts b/src/types/github.ts index 313f939..eef93c0 100644 --- a/src/types/github.ts +++ b/src/types/github.ts @@ -11,6 +11,7 @@ export type FetchParams = { owner?: string; repo?: string; }; + export type LinkedIssues = { issueNumber: number; repo: string; diff --git a/src/types/gpt.ts b/src/types/gpt.ts index 0b2a5ce..5bfaa19 100644 --- a/src/types/gpt.ts +++ b/src/types/gpt.ts @@ -12,7 +12,7 @@ export type StreamlinedComment = { }; export type StreamlinedComments = { - issue: number; + issueNumber: number; repo: string; org: string; comments: StreamlinedComment[]; diff --git a/tests/main.test.ts b/tests/main.test.ts index 9ca1c83..d773fbf 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -17,7 +17,7 @@ const LOG_CALLER = "_Logs."; const systemMsg = `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. 
Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. -Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.` +Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`; type Comment = { id: number; @@ -126,8 +126,6 @@ This is a demo spec for a demo task just perfect for testing. === End Current Issue #1 Conversation ===\n `; - - expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: @UbiquityOS what is pi?"); expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { caller: LOG_CALLER, @@ -293,7 +291,7 @@ function createComments(comments: Comment[]) { } } -function createContext(body = TEST_SLASH_COMMAND, isEnabled = true) { +function createContext(body = TEST_SLASH_COMMAND) { const user = db.users.findFirst({ where: { id: { equals: 1 } } }); return { payload: { diff --git a/yarn.lock b/yarn.lock index 787d96d..18a112c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,32 +2,6 @@ # yarn lockfile v1 -"@actions/core@1.10.1": - version "1.10.1" - resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.1.tgz#61108e7ac40acae95ee36da074fa5850ca4ced8a" - integrity sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g== - dependencies: - "@actions/http-client" "^2.0.1" - uuid "^8.3.2" - -"@actions/github@6.0.0": - version "6.0.0" - resolved "https://registry.yarnpkg.com/@actions/github/-/github-6.0.0.tgz#65883433f9d81521b782a64cc1fd45eef2191ea7" - integrity sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g== - dependencies: - "@actions/http-client" "^2.2.0" - "@octokit/core" "^5.0.1" - "@octokit/plugin-paginate-rest" "^9.0.0" - "@octokit/plugin-rest-endpoint-methods" "^10.0.0" - -"@actions/http-client@^2.0.1", "@actions/http-client@^2.2.0": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.2.1.tgz#ed3fe7a5a6d317ac1d39886b0bb999ded229bb38" - integrity sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw== - dependencies: - tunnel "^0.0.6" - undici "^5.25.4" - "@ampproject/remapping@^2.2.0": version "2.3.0" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" @@ -946,231 +920,116 @@ escape-string-regexp "^4.0.0" rollup-plugin-node-polyfills "^0.2.1" -"@esbuild/aix-ppc64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" - integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== - "@esbuild/android-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd" integrity sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA== -"@esbuild/android-arm64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" - integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== - 
"@esbuild/android-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d" integrity sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A== -"@esbuild/android-arm@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" - integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== - "@esbuild/android-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1" integrity sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww== -"@esbuild/android-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" - integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== - "@esbuild/darwin-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276" integrity sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg== -"@esbuild/darwin-arm64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" - integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== - "@esbuild/darwin-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb" integrity sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw== -"@esbuild/darwin-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" - integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== - "@esbuild/freebsd-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2" integrity sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ== -"@esbuild/freebsd-arm64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" - integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== - "@esbuild/freebsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4" integrity sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ== -"@esbuild/freebsd-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" - integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== - "@esbuild/linux-arm64@0.17.19": version "0.17.19" resolved 
"https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb" integrity sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg== -"@esbuild/linux-arm64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" - integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== - "@esbuild/linux-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a" integrity sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA== -"@esbuild/linux-arm@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" - integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== - "@esbuild/linux-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a" integrity sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ== -"@esbuild/linux-ia32@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" - integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== - "@esbuild/linux-loong64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72" integrity sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ== -"@esbuild/linux-loong64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" - integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== - "@esbuild/linux-mips64el@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289" integrity sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A== -"@esbuild/linux-mips64el@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" - integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== - "@esbuild/linux-ppc64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7" integrity sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg== -"@esbuild/linux-ppc64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" - integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== - "@esbuild/linux-riscv64@0.17.19": version "0.17.19" resolved 
"https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09" integrity sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA== -"@esbuild/linux-riscv64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" - integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== - "@esbuild/linux-s390x@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829" integrity sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q== -"@esbuild/linux-s390x@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" - integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== - "@esbuild/linux-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4" integrity sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw== -"@esbuild/linux-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" - integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== - "@esbuild/netbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462" integrity sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q== -"@esbuild/netbsd-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" - integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== - "@esbuild/openbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691" integrity sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g== -"@esbuild/openbsd-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" - integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== - "@esbuild/sunos-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273" integrity sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg== -"@esbuild/sunos-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" - integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== - "@esbuild/win32-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f" integrity 
sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag== -"@esbuild/win32-arm64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" - integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== - "@esbuild/win32-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03" integrity sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw== -"@esbuild/win32-ia32@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" - integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== - "@esbuild/win32-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061" integrity sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA== -"@esbuild/win32-x64@0.21.5": - version "0.21.5" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" - integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== - "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -1602,7 +1461,7 @@ resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-4.0.0.tgz#40d203ea827b9f17f42a29c6afb93b7745ef80c7" integrity sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA== -"@octokit/core@^5.0.1", "@octokit/core@^5.0.2": +"@octokit/core@^5.0.2": version "5.2.0" resolved "https://registry.yarnpkg.com/@octokit/core/-/core-5.2.0.tgz#ddbeaefc6b44a39834e1bb2e58a49a117672a7ea" integrity sha512-1LFfa/qnMQvEOAdzlQymH0ulepxbxnCYAKJZfMci/5XJyIHWgEYnDmgnKakbTh7CH2tFQ5O60oYDvns4i9RAIg== @@ -1632,11 +1491,6 @@ "@octokit/types" "^13.0.0" universal-user-agent "^6.0.0" -"@octokit/openapi-types@^20.0.0": - version "20.0.0" - resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-20.0.0.tgz#9ec2daa0090eeb865ee147636e0c00f73790c6e5" - integrity sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA== - "@octokit/openapi-types@^22.2.0": version "22.2.0" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-22.2.0.tgz#75aa7dcd440821d99def6a60b5f014207ae4968e" @@ -1654,13 +1508,6 @@ dependencies: "@octokit/types" "^13.5.0" -"@octokit/plugin-paginate-rest@^9.0.0": - version "9.2.1" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz#2e2a2f0f52c9a4b1da1a3aa17dabe3c459b9e401" - integrity sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw== - dependencies: - "@octokit/types" "^12.6.0" - "@octokit/plugin-request-log@^4.0.0": version "4.0.1" resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz#98a3ca96e0b107380664708111864cb96551f958" @@ -1673,13 +1520,6 @@ dependencies: "@octokit/types" "^13.5.0" 
-"@octokit/plugin-rest-endpoint-methods@^10.0.0": - version "10.4.1" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz#41ba478a558b9f554793075b2e20cd2ef973be17" - integrity sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg== - dependencies: - "@octokit/types" "^12.6.0" - "@octokit/request-error@^5.1.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-5.1.0.tgz#ee4138538d08c81a60be3f320cd71063064a3b30" @@ -1716,13 +1556,6 @@ "@octokit/plugin-request-log" "^4.0.0" "@octokit/plugin-rest-endpoint-methods" "13.2.2" -"@octokit/types@^12.6.0": - version "12.6.0" - resolved "https://registry.yarnpkg.com/@octokit/types/-/types-12.6.0.tgz#8100fb9eeedfe083aae66473bd97b15b62aedcb2" - integrity sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw== - dependencies: - "@octokit/openapi-types" "^20.0.0" - "@octokit/types@^13.0.0", "@octokit/types@^13.1.0", "@octokit/types@^13.5.0": version "13.5.0" resolved "https://registry.yarnpkg.com/@octokit/types/-/types-13.5.0.tgz#4796e56b7b267ebc7c921dcec262b3d5bfb18883" @@ -3190,35 +3023,6 @@ esbuild@0.17.19: "@esbuild/win32-ia32" "0.17.19" "@esbuild/win32-x64" "0.17.19" -esbuild@~0.21.4: - version "0.21.5" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" - integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw== - optionalDependencies: - "@esbuild/aix-ppc64" "0.21.5" - "@esbuild/android-arm" "0.21.5" - "@esbuild/android-arm64" "0.21.5" - "@esbuild/android-x64" "0.21.5" - "@esbuild/darwin-arm64" "0.21.5" - "@esbuild/darwin-x64" "0.21.5" - "@esbuild/freebsd-arm64" "0.21.5" - "@esbuild/freebsd-x64" "0.21.5" - "@esbuild/linux-arm" "0.21.5" - "@esbuild/linux-arm64" "0.21.5" - "@esbuild/linux-ia32" "0.21.5" - "@esbuild/linux-loong64" "0.21.5" - "@esbuild/linux-mips64el" "0.21.5" - "@esbuild/linux-ppc64" "0.21.5" - "@esbuild/linux-riscv64" "0.21.5" - "@esbuild/linux-s390x" "0.21.5" - "@esbuild/linux-x64" "0.21.5" - "@esbuild/netbsd-x64" "0.21.5" - "@esbuild/openbsd-x64" "0.21.5" - "@esbuild/sunos-x64" "0.21.5" - "@esbuild/win32-arm64" "0.21.5" - "@esbuild/win32-ia32" "0.21.5" - "@esbuild/win32-x64" "0.21.5" - escalade@^3.1.1, escalade@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27" @@ -3578,7 +3382,7 @@ fs.realpath@^1.0.0: resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -fsevents@^2.3.2, fsevents@~2.3.2, fsevents@~2.3.3: +fsevents@^2.3.2, fsevents@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== @@ -3671,13 +3475,6 @@ get-symbol-description@^1.0.2: es-errors "^1.3.0" get-intrinsic "^1.2.4" -get-tsconfig@^4.7.5: - version "4.7.5" - resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.5.tgz#5e012498579e9a6947511ed0cd403272c7acbbaf" - integrity sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw== - dependencies: - resolve-pkg-maps "^1.0.0" - 
git-raw-commits@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/git-raw-commits/-/git-raw-commits-4.0.0.tgz#b212fd2bff9726d27c1283a1157e829490593285" @@ -5596,11 +5393,6 @@ resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== -resolve-pkg-maps@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f" - integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw== - resolve.exports@^2.0.0, resolve.exports@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" @@ -6190,21 +5982,6 @@ tslib@^2.2.0, tslib@^2.6.2: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== -tsx@4.15.6: - version "4.15.6" - resolved "https://registry.yarnpkg.com/tsx/-/tsx-4.15.6.tgz#4522ed093f7fa54f031a7a999274e8b35dbf3165" - integrity sha512-is0VQQlfNZRHEuSSTKA6m4xw74IU4AizmuB6lAYLRt9XtuyeQnyJYexhNZOPCB59SqC4JzmSzPnHGBXxf3k0hA== - dependencies: - esbuild "~0.21.4" - get-tsconfig "^4.7.5" - optionalDependencies: - fsevents "~2.3.3" - -tunnel@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" - integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== - type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" @@ -6310,7 +6087,7 @@ undici-types@~5.26.4: resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== -undici@^5.25.4, undici@^5.28.2: +undici@^5.28.2: version "5.28.4" resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" integrity sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g== From 742678a9b555e66c741e5378a5f1e1e2e7b828ab Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:16:46 +0100 Subject: [PATCH 47/72] chore: slight fetch and handling improvements --- src/helpers/issue-fetching.ts | 164 +++++++++++++++++----------------- src/helpers/issue-handling.ts | 93 +++++++++++-------- src/helpers/issue.ts | 49 +++++----- 3 files changed, 164 insertions(+), 142 deletions(-) diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 8e1efc9..7ddd81c 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -3,19 +3,94 @@ import { Context } from "../types"; import { FetchParams, Issue, IssueComments, LinkedIssues, ReviewComments } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; import { dedupeStreamlinedComments, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; -import { handleIssue, handleSpec, handleSpecAndBodyKeys } from "./issue-handling"; +import { handleIssue, 
handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; + + export async function recursivelyFetchLinkedIssues(params: FetchParams) { - const { linkedIssues, seen, specOrBodies, streamlinedComments } = await fetchLinkedIssues(params); + const { linkedIssues, seen, specAndBodies, streamlinedComments } = await fetchLinkedIssues(params); + + const fetchPromises = linkedIssues.map(async (linkedIssue) => + await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen) + ); + await throttlePromises(fetchPromises, 10); - const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); - await Promise.allSettled(fetchPromises); + const linkedIssuesKeys = linkedIssues.map((issue) => + createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`) + ); + const specAndBodyKeys = Array.from( + new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys]) + ); - const linkedIssuesKeys = linkedIssues.map((issue) => createKey(issue.url, issue.issueNumber)); - const specAndBodyKeys = Array.from(new Set([...Object.keys(specOrBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); + return { linkedIssues, specAndBodies, streamlinedComments }; +} + +export async function fetchLinkedIssues(params: FetchParams) { + const { comments, issue } = await fetchIssueComments(params); + const issueKey = createKey(issue.html_url); + const [owner, repo, issueNumber] = splitKey(issueKey); + const linkedIssues: LinkedIssues[] = [ + { body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }, + ]; + const specAndBodies: Record = {}; + const seen = new Set(); + + // add the spec body as a comment + comments.push({ + body: issue.body || "", + // @ts-expect-error - github types undefined + user: issue.user, + id: issue.id, + html_url: issue.html_url, + }); + + for (const comment of comments) { + const foundIssues = idIssueFromComment(comment.body); + if (foundIssues) { + for (const linkedIssue of foundIssues) { + const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); + if (seen.has(linkedKey)) { + continue; + } + seen.add(linkedKey); + const { issueNumber, owner, repo } = linkedIssue; + + const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ + context: params.context, + issueNum: issueNumber, + owner, + repo, + }); + + specAndBodies[linkedKey] = fetchedIssue.body || ""; + linkedIssue.body = fetchedIssue.body || ""; + linkedIssue.comments = fetchedComments; + linkedIssues.push(linkedIssue); + } + } + } + + return { streamlinedComments: await getAllStreamlinedComments(linkedIssues), linkedIssues, specAndBodies, seen }; +} + +export async function mergeCommentsAndFetchSpec( + params: FetchParams, + linkedIssue: LinkedIssues, + streamlinedComments: Record, + specOrBodies: Record, + seen: Set +) { - return { linkedIssues, specAndBodies: specOrBodies, streamlinedComments }; + if (linkedIssue.comments) { + const streamed = await getAllStreamlinedComments([linkedIssue]); + const merged = mergeStreamlinedComments(streamlinedComments, streamed); + streamlinedComments = { ...streamlinedComments, ...merged }; + } + + if (linkedIssue.body) { + await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), 
seen, streamlinedComments); + } } export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { @@ -40,7 +115,7 @@ export async function fetchIssue(params: FetchParams) { const { octokit, payload } = params.context; const { issueNum, owner, repo } = params; - return await octokit.issues + return await octokit.rest.issues .get({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, @@ -57,10 +132,6 @@ export async function fetchIssueComments(params: FetchParams) { let comments; if (issue.pull_request) { - /** - * With every review comment with a tagged code line we have `diff_hunk` which is great context - * but could easily max our tokens. - */ comments = await octokit.paginate(octokit.pulls.listReviewComments, { owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, @@ -80,55 +151,6 @@ export async function fetchIssueComments(params: FetchParams) { }; } -export async function fetchLinkedIssues(params: FetchParams) { - const { comments, issue } = await fetchIssueComments(params); - const issueKey = createKey(issue.url); - const [owner, repo, issueNumber] = splitKey(issueKey); - const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.url }]; - - const specOrBodies: Record = {}; - specOrBodies[issueKey] = issue.body || ""; - - const seen = new Set(); - seen.add(issueKey); - - for (const comment of comments) { - let url = ""; - if ("issue_url" in comment) { - url = comment.issue_url; - } else if ("pull_request_url" in comment) { - url = comment.pull_request_url; - } - - const key = createKey(url); - const linkedIssue = idIssueFromComment(key.split("/")[0], comment.body, { - repo: key.split("/")[1], - issueNum: parseInt(key.split("/")[2]), - context: params.context, - }); - - if (linkedIssue) { - const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); - seen.add(linkedKey); - const [owner, repo, issueNumber] = splitKey(linkedKey); - - const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ - context: params.context, - issueNum: parseInt(issueNumber), - owner, - repo, - }); - - specOrBodies[linkedKey] = fetchedIssue.body || ""; - linkedIssue.body = fetchedIssue.body || ""; - linkedIssue.comments = fetchedComments; - linkedIssues.push(linkedIssue); - } - } - - return { streamlinedComments: await getAllStreamlinedComments(linkedIssues), linkedIssues, specOrBodies, seen }; -} - export async function fetchAndHandleIssue( key: string, params: FetchParams, @@ -138,22 +160,4 @@ export async function fetchAndHandleIssue( const [owner, repo, issueNumber] = splitKey(key); await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNumber) }, streamlinedComments, seen); return streamlinedComments[key] || []; -} - -export async function fetchCommentsAndHandleSpec( - params: FetchParams, - linkedIssue: LinkedIssues, - streamlinedComments: Record, - specOrBodies: Record, - seen: Set -) { - if (linkedIssue.comments) { - const streamed = await getAllStreamlinedComments([linkedIssue]); - const merged = mergeStreamlinedComments(streamlinedComments, streamed); - streamlinedComments = { ...streamlinedComments, ...merged }; - } - - if (linkedIssue.body) { - await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); - } -} +} \ No newline at end of file diff --git a/src/helpers/issue-handling.ts 
b/src/helpers/issue-handling.ts index 0371e81..067fa2c 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -2,15 +2,15 @@ import { createKey } from "../handlers/comments"; import { FetchParams } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; -import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, fetchCommentsAndHandleSpec } from "./issue-fetching"; +import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, mergeCommentsAndFetchSpec } from "./issue-fetching"; -export async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen?: Set) { - if (alreadySeen && alreadySeen.has(createKey(`////${params.owner}/${params.repo}/${params.issueNum}`))) { +export async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen: Set) { + if (alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { return; } - const { linkedIssues, seen, specOrBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); - const fetchPromises = linkedIssues.map((linkedIssue) => fetchCommentsAndHandleSpec(params, linkedIssue, streamlinedComments, specOrBodies, seen)); - await Promise.allSettled(fetchPromises); + const { linkedIssues, seen, specAndBodies, streamlinedComments: streamlined } = await fetchLinkedIssues(params); + const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); + await throttlePromises(fetchPromises, 10); return mergeStreamlinedComments(streamlinedComments, streamlined); } @@ -23,28 +23,29 @@ export async function handleSpec( streamlinedComments: Record ) { specAndBodies[key] = specOrBody; - const [owner, repo, issueNumber] = splitKey(key); - const anotherReferencedIssue = idIssueFromComment(owner, specOrBody, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); + const otherReferences = idIssueFromComment(specOrBody); - if (anotherReferencedIssue) { - const anotherKey = createKey(anotherReferencedIssue.url, anotherReferencedIssue.issueNumber); - if (seen.has(anotherKey)) { - return; - } - seen.add(anotherKey); - const issue = await fetchIssue({ - ...params, - owner: anotherReferencedIssue.owner, - repo: anotherReferencedIssue.repo, - issueNum: anotherReferencedIssue.issueNumber, - }); - if (issue.body) { - specAndBodies[anotherKey] = issue.body; - } - const [owner, repo, issueNum] = splitKey(anotherKey); - if (!streamlinedComments[anotherKey]) { - await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); - await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue.body || "", specAndBodies, anotherKey, seen, streamlinedComments); + if (otherReferences) { + for (const ref of otherReferences) { + const anotherKey = createKey(ref.url, ref.issueNumber); + if (seen.has(anotherKey)) { + return; + } + seen.add(anotherKey); + const issue = await fetchIssue({ + ...params, + owner: ref.owner, + repo: ref.repo, + issueNum: ref.issueNumber, + }); + if (issue.body) { + specAndBodies[anotherKey] = issue.body; + } + const [owner, repo, issueNum] = splitKey(anotherKey); + if (!streamlinedComments[anotherKey]) { + await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); + await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue.body || 
"", specAndBodies, anotherKey, seen, streamlinedComments); + } } } @@ -57,15 +58,16 @@ export async function handleComment( streamlinedComments: Record, seen: Set ) { - const [, , , , owner, repo, , issueNumber] = comment.issueUrl.split("/"); - const anotherReferencedIssue = idIssueFromComment(owner, comment.body, { ...params, owner, repo, issueNum: parseInt(issueNumber) }); + const otherReferences = idIssueFromComment(comment.body); - if (anotherReferencedIssue) { - const key = createKey(anotherReferencedIssue.url); - const [refOwner, refRepo, refIssueNumber] = splitKey(key); + if (otherReferences) { + for (const ref of otherReferences) { + const key = createKey(ref.url); + const [refOwner, refRepo, refIssueNumber] = splitKey(key); - if (!streamlinedComments[key]) { - await handleIssue({ ...params, owner: refOwner, repo: refRepo, issueNum: parseInt(refIssueNumber) }, streamlinedComments, seen); + if (!streamlinedComments[key]) { + await handleIssue({ ...params, owner: refOwner, repo: refRepo, issueNum: parseInt(refIssueNumber) }, streamlinedComments, seen); + } } } } @@ -76,8 +78,25 @@ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, if (!comments || comments.length === 0) { comments = await fetchAndHandleIssue(key, params, streamlinedComments, seen); } - return Promise.all(comments.map((comment: StreamlinedComment) => handleComment(params, comment, streamlinedComments, seen))); - }); - await Promise.all(commentProcessingPromises); + for (const comment of comments) { + await handleComment(params, comment, streamlinedComments, seen); + } + }) + + await throttlePromises(commentProcessingPromises, 10); } + +export async function throttlePromises(promises: Promise[], limit: number) { + const executing: Promise[] = []; + for (const promise of promises) { + executing.push(Promise.resolve(promise)) + if (executing.length >= limit) { + await Promise.race(executing); + const index = executing.indexOf(promise); + executing.splice(index, 1); + } + } + + return Promise.all(executing); +} \ No newline at end of file diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index 2eb0228..77550cb 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -1,4 +1,5 @@ -import { FetchParams, LinkedIssues } from "../types/github"; +import { createKey } from "../handlers/comments"; +import { LinkedIssues } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; export function dedupeStreamlinedComments(streamlinedComments: Record) { @@ -32,31 +33,29 @@ export function splitKey(key: string): [string, string, string] { return [parts[0], parts[1], parts[2]]; } -export function idIssueFromComment(owner?: string, comment?: string | null, params?: FetchParams): LinkedIssues | null { - if (!comment) { - return null; - } - - const urlMatch = comment.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/); - const hashMatch = comment.match(/#(\d+)/); +export function idIssueFromComment(comment?: string | null): LinkedIssues[] | null { + const urlMatch = comment?.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/g); + const response: LinkedIssues[] = []; - if (hashMatch) { - return { - owner: owner || params?.owner || "", - repo: params?.repo || "", - issueNumber: parseInt(hashMatch[1]), - url: `https://api.github.com/repos/${params?.owner || owner}/${params?.repo}/issues/${hashMatch[1]}`, - } as LinkedIssues; + if (urlMatch && urlMatch.length > 0) { + urlMatch.forEach((url) => { + 
response.push(createLinkedIssueOrPr(url)); + }); } - if (urlMatch) { - return { - url: `https://api.github.com/repos/${urlMatch[1]}/${urlMatch[2]}/issues/${urlMatch[4]}`, - owner: owner ?? urlMatch[1], - repo: urlMatch[2], - issueNumber: parseInt(urlMatch[4]), - } as LinkedIssues; - } - - return null; + return response; } + +function createLinkedIssueOrPr( + url: string +): LinkedIssues { + const key = createKey(url); + const [owner, repo, issueNumber] = splitKey(key); + + return { + owner, + repo, + issueNumber: parseInt(issueNumber), + url, + }; +} \ No newline at end of file From 1701446f4f437aac8b2cf128347b27df4546242c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:19:52 +0100 Subject: [PATCH 48/72] chore: more robust key creation, minor fixes --- src/handlers/ask-gpt.ts | 2 +- src/handlers/comments.ts | 51 ++++++++++++++++++++++-------- src/helpers/format-chat-history.ts | 18 ++++++----- 3 files changed, 49 insertions(+), 22 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index f883004..15f77a2 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -9,8 +9,8 @@ export async function askQuestion(context: Context, question: string) { } const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); - const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); + const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); return await askGpt(context, formattedChat); } diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index 37ef255..b567240 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -1,3 +1,4 @@ +import { splitKey } from "../helpers/issue"; import { IssueComments, LinkedIssues, ReviewComments } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; @@ -29,10 +30,39 @@ export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { } export function createKey(issueUrl: string, issue?: number) { - if (!issueUrl) throw new Error("issue.url is required to create a key"); - const [, , , , issueOrg, issueRepo, , issueNumber] = issueUrl.split("/"); + const urlParts = issueUrl.split("/"); - return `${issueOrg}/${issueRepo}/${issueNumber || issue}`; + let key = ""; + + if (urlParts.length === 7) { + const [, , , issueOrg, issueRepo, , issueNumber] = urlParts; + key = `${issueOrg}/${issueRepo}/${issueNumber}`; + } + + if (urlParts.length === 5) { + const [, , issueOrg, issueRepo] = urlParts; + key = `${issueOrg}/${issueRepo}/${issue}`; + } + + if (urlParts.length === 8) { + const [, , , issueOrg, issueRepo, , , issueNumber] = urlParts; + key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`; + } + + if (urlParts.length === 3) { + const [issueOrg, issueRepo, issueNumber] = urlParts; + key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`; + } + + if (!key) { + throw new Error("Invalid issue url"); + } + + if (key.includes("#")) { + key = key.split("#")[0]; + } + + return key; } export function streamlineComments(comments: IssueComments | ReviewComments) { @@ -44,15 +74,10 @@ export function streamlineComments(comments: IssueComments | ReviewComments) { continue; } - let url = ""; - if ("issue_url" in comment) { - url = comment.issue_url; - } else if ("pull_request_url" in comment) { - url = comment.pull_request_url; - } - + const url = comment.html_url; const body = comment.body; const key = createKey(url); + const 
[owner, repo] = splitKey(key); if (!streamlined[key]) { streamlined[key] = []; @@ -63,11 +88,11 @@ export function streamlineComments(comments: IssueComments | ReviewComments) { user: user.login, body, id: comment.id, - org: url.split("/")[4], - repo: url.split("/")[5], + org: owner, + repo, issueUrl: url, }); } } return streamlined; -} +} \ No newline at end of file diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 0b5c20a..e795310 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -7,10 +7,9 @@ import { fetchPullRequestDiff, fetchIssue } from "./issue-fetching"; export async function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record) { const convoKeys = Object.keys(streamlined); const specAndBodyKeys = Object.keys(specAndBodies); - const keys: string[] = Array.from(new Set([...convoKeys, ...specAndBodyKeys])); const chatHistory: string[] = []; - const currentIssueKey = createKey(context.payload.issue.url); - + const currentIssueKey = createKey(context.payload.issue.html_url); + const keys: string[] = Array.from(new Set([...convoKeys, ...specAndBodyKeys, currentIssueKey])); for (const key of keys) { const isCurrentIssue = key === currentIssueKey; const block = await createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue); @@ -59,9 +58,9 @@ async function createContextBlockSection( isCurrentIssue: boolean ) { const comments = streamlined[key]; - const [org, repo, _issue, issue] = key.split("/"); + const [org, repo, issueNum] = key.split("/"); - const issueNumber = parseInt(issue ?? _issue); + const issueNumber = parseInt(issueNum); const isPull = await fetchPullRequestDiff(context, org, repo, issueNumber); if (!issueNumber || isNaN(issueNumber)) { @@ -90,7 +89,7 @@ async function createContextBlockSection( const block = [ specOrBodyBlock.join(""), createHeader(header, repoString), - createComment({ issue: parseInt(issue), repo, org, comments }), + createComment({ issueNumber, repo, org, comments }), createFooter(header), ]; @@ -112,10 +111,13 @@ function createFooter(content: string) { } function createComment(comment: StreamlinedComments) { + if (!comment.comments) { + return ""; + } const comments = []; // filter dupes - comment.comments = comment.comments.filter((c, i, a) => a.findIndex((cc) => cc.id === c.id) === i); + comment.comments = comment.comments?.filter((c, i, a) => a.findIndex((cc) => cc.id === c.id) === i); for (const c of comment.comments) { comments.push(`${c.id} ${c.user}: ${c.body}\n`); @@ -135,7 +137,7 @@ export function createChatHistory(formattedChat: string) { content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. 
-Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.` +Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, }; const userMessage: ChatCompletionMessageParam = { From c687b8ba88eae08a85a018b9c71d514505ad830c Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:07:42 +0100 Subject: [PATCH 49/72] chore: format and add hashMatch from body --- src/handlers/comments.ts | 2 +- src/helpers/format-chat-history.ts | 40 +++++++++++++++++++----------- src/helpers/issue-fetching.ts | 23 +++++------------ src/helpers/issue-handling.ts | 20 +++++++++------ src/helpers/issue.ts | 22 +++++++++++----- tests/main.test.ts | 2 +- 6 files changed, 61 insertions(+), 48 deletions(-) diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index b567240..392f8da 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -95,4 +95,4 @@ export function streamlineComments(comments: IssueComments | ReviewComments) { } } return streamlined; -} \ No newline at end of file +} diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index e795310..5d369fe 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -1,8 +1,9 @@ import { ChatCompletionMessageParam } from "openai/resources"; import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; -import { createKey } from "../handlers/comments"; -import { fetchPullRequestDiff, fetchIssue } from "./issue-fetching"; +import { createKey, streamlineComments } from "../handlers/comments"; +import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; +import { splitKey } from "./issue"; export async function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record) { const convoKeys = Object.keys(streamlined); @@ -10,6 +11,7 @@ export async function formatChatHistory(context: Context, streamlined: Record, isCurrentIssue: boolean ) { - const comments = streamlined[key]; + let comments = streamlined[key]; + + if (!comments || comments.length === 0) { + const [owner, repo, number] = splitKey(key); + const { comments: comments_ } = await fetchIssueComments({ + context, + owner, + repo, + issueNum: parseInt(number), + }); + + comments = streamlineComments(comments_)[key]; + } + const [org, repo, issueNum] = key.split("/"); const issueNumber = parseInt(issueNum); - const isPull = await fetchPullRequestDiff(context, org, repo, issueNumber); + const prDiff = await fetchPullRequestDiff(context, org, repo, issueNumber); if (!issueNumber || isNaN(issueNumber)) { throw context.logger.error("Issue number is not valid"); } - const specHeader = getCorrectHeaderString(isPull, issueNumber, isCurrentIssue, false); + const specHeader = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, false); let specOrBody = specAndBodies[key]; if (!specOrBody) { @@ -79,25 +94,20 @@ async function createContextBlockSection( repo, issueNum: issueNumber, }) - ).body || "No specification or body available"; + )?.body || "No specification or body available"; } const specOrBodyBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader)]; - const header = getCorrectHeaderString(isPull, issueNumber, isCurrentIssue, true); + const header = getCorrectHeaderString(prDiff, issueNumber, 
isCurrentIssue, true); const repoString = `${org}/${repo} #${issueNumber}`; - const block = [ - specOrBodyBlock.join(""), - createHeader(header, repoString), - createComment({ issueNumber, repo, org, comments }), - createFooter(header), - ]; + const block = [specOrBodyBlock.join(""), createHeader(header, repoString), createComment({ issueNumber, repo, org, comments }), createFooter(header)]; - if (!isPull) { + if (!prDiff) { return block.join(""); } - const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), isPull, createFooter("Linked Pull Request Code Diff")]; + const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), prDiff, createFooter("Linked Pull Request Code Diff")]; return block.concat(diffBlock).join(""); } diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 7ddd81c..f61d513 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -5,22 +5,14 @@ import { StreamlinedComment } from "../types/gpt"; import { dedupeStreamlinedComments, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { handleIssue, handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; - - export async function recursivelyFetchLinkedIssues(params: FetchParams) { const { linkedIssues, seen, specAndBodies, streamlinedComments } = await fetchLinkedIssues(params); - const fetchPromises = linkedIssues.map(async (linkedIssue) => - await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen) - ); + const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); await throttlePromises(fetchPromises, 10); - const linkedIssuesKeys = linkedIssues.map((issue) => - createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`) - ); - const specAndBodyKeys = Array.from( - new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys]) - ); + const linkedIssuesKeys = linkedIssues.map((issue) => createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`)); + const specAndBodyKeys = Array.from(new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); return { linkedIssues, specAndBodies, streamlinedComments }; @@ -30,9 +22,7 @@ export async function fetchLinkedIssues(params: FetchParams) { const { comments, issue } = await fetchIssueComments(params); const issueKey = createKey(issue.html_url); const [owner, repo, issueNumber] = splitKey(issueKey); - const linkedIssues: LinkedIssues[] = [ - { body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }, - ]; + const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; const specAndBodies: Record = {}; const seen = new Set(); @@ -46,7 +36,7 @@ export async function fetchLinkedIssues(params: FetchParams) { }); for (const comment of comments) { - const foundIssues = idIssueFromComment(comment.body); + const foundIssues = idIssueFromComment(comment.body, params); if (foundIssues) { for (const linkedIssue of foundIssues) { const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); @@ -81,7 +71,6 @@ export async function mergeCommentsAndFetchSpec( specOrBodies: Record, seen: Set ) { - if 
(linkedIssue.comments) { const streamed = await getAllStreamlinedComments([linkedIssue]); const merged = mergeStreamlinedComments(streamlinedComments, streamed); @@ -160,4 +149,4 @@ export async function fetchAndHandleIssue( const [owner, repo, issueNumber] = splitKey(key); await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNumber) }, streamlinedComments, seen); return streamlinedComments[key] || []; -} \ No newline at end of file +} diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 067fa2c..42fe7de 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -23,7 +23,7 @@ export async function handleSpec( streamlinedComments: Record ) { specAndBodies[key] = specOrBody; - const otherReferences = idIssueFromComment(specOrBody); + const otherReferences = idIssueFromComment(specOrBody, params); if (otherReferences) { for (const ref of otherReferences) { @@ -58,7 +58,7 @@ export async function handleComment( streamlinedComments: Record, seen: Set ) { - const otherReferences = idIssueFromComment(comment.body); + const otherReferences = idIssueFromComment(comment.body, params); if (otherReferences) { for (const ref of otherReferences) { @@ -82,21 +82,25 @@ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, for (const comment of comments) { await handleComment(params, comment, streamlinedComments, seen); } - }) + }); await throttlePromises(commentProcessingPromises, 10); } export async function throttlePromises(promises: Promise[], limit: number) { const executing: Promise[] = []; + for (const promise of promises) { - executing.push(Promise.resolve(promise)) + const p = promise.then(() => { + void executing.splice(executing.indexOf(p), 1); + }); + + executing.push(p); + if (executing.length >= limit) { await Promise.race(executing); - const index = executing.indexOf(promise); - executing.splice(index, 1); } } - return Promise.all(executing); -} \ No newline at end of file + await Promise.all(executing); +} diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index 77550cb..9a0f14a 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -1,5 +1,5 @@ import { createKey } from "../handlers/comments"; -import { LinkedIssues } from "../types/github"; +import { FetchParams, LinkedIssues } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; export function dedupeStreamlinedComments(streamlinedComments: Record) { @@ -33,7 +33,7 @@ export function splitKey(key: string): [string, string, string] { return [parts[0], parts[1], parts[2]]; } -export function idIssueFromComment(comment?: string | null): LinkedIssues[] | null { +export function idIssueFromComment(comment?: string | null, params?: FetchParams): LinkedIssues[] | null { const urlMatch = comment?.match(/https:\/\/(?:www\.)?github.com\/([^/]+)\/([^/]+)\/(pull|issue|issues)\/(\d+)/g); const response: LinkedIssues[] = []; @@ -42,13 +42,23 @@ export function idIssueFromComment(comment?: string | null): LinkedIssues[] | nu response.push(createLinkedIssueOrPr(url)); }); } - + /** + * These can only reference issues within the same repository + * so params works here + */ + const hashMatch = comment?.match(/#(\d+)/g); + if (hashMatch && hashMatch.length > 0) { + hashMatch.forEach((hash) => { + const issueNumber = hash.replace("#", ""); + const owner = params?.context.payload.repository?.owner?.login || ""; + const repo = params?.context.payload.repository?.name || ""; + response.push({ owner, repo, issueNumber: 
parseInt(issueNumber), url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` }); + }); + } return response; } -function createLinkedIssueOrPr( - url: string -): LinkedIssues { +function createLinkedIssueOrPr(url: string): LinkedIssues { const key = createKey(url); const [owner, repo, issueNumber] = splitKey(key); diff --git a/tests/main.test.ts b/tests/main.test.ts index d773fbf..ab67583 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -226,7 +226,7 @@ function transformCommentTemplate(commentId: number, issueNumber: number, body: }, body: TEST_QUESTION, url: "https://api.github.com/repos/ubiquity/test-repo/issues/comments/1", - html_url: "https://api.github.com/repos/ubiquity/test-repo/issues/1", + html_url: "https://www.github.com/ubiquity/test-repo/issues/1", owner: "ubiquity", repo: "test-repo", issue_number: 1, From 7d2cc57b56cb145e8b9eac8d14524044aac287da Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:19:20 +0100 Subject: [PATCH 50/72] chore: optional chaining, try catch blocks --- src/helpers/issue-fetching.ts | 58 +++++++++++++++++++++++++---------- src/helpers/issue-handling.ts | 4 +-- src/helpers/issue.ts | 2 +- 3 files changed, 44 insertions(+), 20 deletions(-) diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index f61d513..fc1653f 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -20,6 +20,9 @@ export async function recursivelyFetchLinkedIssues(params: FetchParams) { export async function fetchLinkedIssues(params: FetchParams) { const { comments, issue } = await fetchIssueComments(params); + if (!issue) { + return { streamlinedComments: {}, linkedIssues: [], specAndBodies: {}, seen: new Set() }; + } const issueKey = createKey(issue.html_url); const [owner, repo, issueNumber] = splitKey(issueKey); const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; @@ -53,8 +56,8 @@ export async function fetchLinkedIssues(params: FetchParams) { repo, }); - specAndBodies[linkedKey] = fetchedIssue.body || ""; - linkedIssue.body = fetchedIssue.body || ""; + specAndBodies[linkedKey] = fetchedIssue?.body || ""; + linkedIssue.body = fetchedIssue?.body || ""; linkedIssue.comments = fetchedComments; linkedIssues.push(linkedIssue); } @@ -101,37 +104,58 @@ export async function fetchPullRequestDiff(context: Context, org: string, repo: } export async function fetchIssue(params: FetchParams) { - const { octokit, payload } = params.context; + const { octokit, payload, logger } = params.context; const { issueNum, owner, repo } = params; - return await octokit.rest.issues - .get({ + try { + return await octokit.rest.issues + .get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }) + .then(({ data }) => data as Issue); + } catch (e) { + logger.error(`Error fetching issue `, { + e, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, issue_number: issueNum || payload.issue.number, - }) - .then(({ data }) => data as Issue); + }); + return null; + } } export async function fetchIssueComments(params: FetchParams) { - const { octokit, payload } = params.context; + const { octokit, payload, logger } = params.context; const { issueNum, owner, repo } = params; const issue = await fetchIssue(params); - let comments; - if (issue.pull_request) { - comments = 
await octokit.paginate(octokit.pulls.listReviewComments, { - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - pull_number: issueNum || payload.issue.number, - }); - } else { - comments = await octokit.paginate(octokit.issues.listComments, { + let comments: IssueComments | ReviewComments = []; + + try { + if (issue?.pull_request) { + comments = await octokit.paginate(octokit.pulls.listReviewComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + pull_number: issueNum || payload.issue.number, + }); + } else { + comments = await octokit.paginate(octokit.issues.listComments, { + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + } + } catch (e) { + logger.error(`Error fetching comments `, { + e, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, issue_number: issueNum || payload.issue.number, }); + comments = []; } return { diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 42fe7de..f10d998 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -38,13 +38,13 @@ export async function handleSpec( repo: ref.repo, issueNum: ref.issueNumber, }); - if (issue.body) { + if (issue?.body) { specAndBodies[anotherKey] = issue.body; } const [owner, repo, issueNum] = splitKey(anotherKey); if (!streamlinedComments[anotherKey]) { await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); - await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue.body || "", specAndBodies, anotherKey, seen, streamlinedComments); + await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue?.body || "", specAndBodies, anotherKey, seen, streamlinedComments); } } } diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index 9a0f14a..e0ec8ca 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -68,4 +68,4 @@ function createLinkedIssueOrPr(url: string): LinkedIssues { issueNumber: parseInt(issueNumber), url, }; -} \ No newline at end of file +} From c4985001558c1f888c31a217d3d8c7e179450ed0 Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:23:56 +0100 Subject: [PATCH 51/72] chore: add token usage in html comment --- src/plugin.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/plugin.ts b/src/plugin.ts index 4fb5147..30c7622 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -58,7 +58,9 @@ export async function runPlugin(context: Context) { logger.info(`Answer: ${answer}`, { tokenUsage }); - commentToPost = answer; + const tokens = `\n\n<--\n${JSON.stringify(tokenUsage, null, 2)}\n-->`; + + commentToPost = answer + tokens; } catch (err) { let errorMessage; if (err instanceof LogReturn) { From ef080c97eba4c3857f0a3ed5b18eb1a84f796bed Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:28:06 +0100 Subject: [PATCH 52/72] chore: typo --- src/plugin.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/plugin.ts b/src/plugin.ts index 30c7622..1c3c5e3 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -58,7 +58,7 @@ export async function runPlugin(context: Context) { logger.info(`Answer: ${answer}`, { tokenUsage }); - const tokens = `\n\n<--\n${JSON.stringify(tokenUsage, null, 2)}\n-->`; + const tokens = 
`\n\n<!--\n${JSON.stringify(tokenUsage, null, 2)}\n-->`; commentToPost = answer + tokens; } catch (err) { From c4e98b6151180800e6c0a5817f1147556498b47d Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 26 Sep 2024 03:45:15 +0100 Subject: [PATCH 53/72] chore: readme --- README.md | 100 ++++++++++++-------------------------------------- manifest.json | 4 +- package.json | 4 +- wrangler.toml | 2 +- 4 files changed, 28 insertions(+), 82 deletions(-) diff --git a/README.md b/README.md index 4dcb9f1..99fd87f 100644 --- a/README.md +++ b/README.md @@ -1,93 +1,39 @@ -# `@ubiquibot/plugin-template` +# `@ubiquity-os/command-ask` -## Prerequisites +This is a highly context aware GitHub organization integrated bot that uses the OpenAI GPT-4o model to provide highly relevant answers to questions and queries in GitHub issues and pull requests. -- A good understanding of how the [kernel](https://github.com/ubiquity/ubiquibot-kernel) works and how to interact with it. -- A basic understanding of the Ubiquibot configuration and how to define your plugin's settings. +## Usage -## Getting Started +In any repository where your Ubiquity OS app is installed, in both issues and pull requests, you simply mention `@UbiquityOS` with your question or query, and the bot will use the latest OpenAI GPT-4o model to provide you with a highly relevant answer. -1. Create a new repository using this template. -2. Clone the repository to your local machine. -3. Install the dependencies preferably using `yarn` or `bun`. +## How it works -## Creating a new plugin +With its huge context window, we are able to feed the model the entire conversational history, which we obtain by recursively fetching any referenced issues or pull requests from the chat history. This allows the model to have a very deep understanding of the current scope and provide highly relevant answers. -- If your plugin is to be used as a slash command which should have faster response times as opposed to longer running GitHub action tasks, you should use the `worker` type. +As it receives everything from discussions to pull request diffs and review comments, it is a highly versatile and capable bot that can assist in a wide range of scenarios. -1. Ensure you understand and have setup the [kernel](https://github.com/ubiquity/ubiquibot-kernel). -2. Update [compute.yml](./.github/workflows/compute.yml) with your plugin's name and update the `id`. -3. Update [context.ts](./src/types/context.ts) with the events that your plugin will fire on. -4. Update [plugin-inputs.ts](./src/types/plugin-inputs.ts) to match the `with:` settings in your org or repo level configuration. +## Installation -- Your plugin config should look similar to this: +`ubiquibot-config.yml`: ```yml -- plugin: /:compute.yml@development - name: plugin-name - id: plugin-name-command - description: "Plugin description" # small description of what the plugin does - command: "" # if you are creating a plugin with a slash command - example: "" # how to invoke the slash command - with: # these are the example settings, the kernel passes these to the plugin. - disabledCommands: [] - timers: - reviewDelayTolerance: 86000 - taskStaleTimeoutDuration: 2580000 - miscellaneous: - maxConcurrentTasks: 3 - labels: - time: [] - priority: [] +plugins: + - uses: + - plugin: http://localhost:4000 + with: + # Use your own app name for local testing + ubiquity_os_app_slug: "UbiquityOS" ``` ###### At this stage, your plugin will fire on your defined events with the required settings passed in from the kernel. 
You can now start writing your plugin's logic. +`.dev.vars` (for local testing): -5. Start building your plugin by adding your logic to the [plugin.ts](./src/plugin.ts) file. - -## Testing a plugin - -### Worker Plugins - -- `yarn/bun worker` - to run the worker locally. -- To trigger the worker, `POST` requests to http://localhost:4000/ with an event payload similar to: - -```ts -await fetch("http://localhost:4000/", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - stateId: "", - eventName: "", - eventPayload: "", - settings: "", - ref: "", - authToken: "", - }), -}); +```sh +# OpenAI API key +OPENAI_API_KEY=your-api-key ``` -A full example can be found [here](https://github.com/ubiquibot/assistive-pricing/blob/623ea3f950f04842f2d003bda3fc7b7684e41378/tests/http/request.http). - -### Action Plugins - -- Ensure the kernel is running and listening for events. -- Fire an event in/to the repo where the kernel is installed. This can be done in a number of ways, the easiest being via the GitHub UI or using the GitHub API, such as posting a comment, opening an issue, etc in the org/repo where the kernel is installed. -- The kernel will process the event and dispatch it using the settings defined in your `.ubiquibot-config.yml`. -- The `compute.yml` workflow will run and execute your plugin's logic. -- You can view the logs in the Actions tab of your repo. +## Testing -[Nektos Act](https://github.com/nektos/act) - a tool for running GitHub Actions locally. - -## More information - -- [Full Ubiquibot Configuration](https://github.com/ubiquity/ubiquibot/blob/0fde7551585499b1e0618ec8ea5e826f11271c9c/src/types/configuration-types.ts#L62) - helpful for defining your plugin's settings as they are strongly typed and will be validated by the kernel. -- [Ubiquibot V1](https://github.com/ubiquity/ubiquibot) - helpful for porting V1 functionality to V2, helper/utility functions, types, etc. Everything is based on the V1 codebase but with a more modular approach. When using V1 code, keep in mind that most all code will need refactored to work with the new V2 architecture. 
- -## Examples - -- [Start/Stop Slash Command](https://github.com/ubq-testing/start-stop-module) - simple -- [Assistive Pricing Plugin](https://github.com/ubiquibot/assistive-pricing) - complex -- [Conversation Rewards](https://github.com/ubiquibot/conversation-rewards) - really complex +```sh +yarn test +``` diff --git a/manifest.json b/manifest.json index 0ac072f..5d6ce58 100644 --- a/manifest.json +++ b/manifest.json @@ -1,5 +1,5 @@ { - "name": "gpt", - "description": "gpt", + "name": "command-ask", + "description": "A highly context aware organization integrated chatbot", "ubiquity:listeners": ["issue_comment.created"] } diff --git a/package.json b/package.json index 075342d..d172636 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { - "name": "@ubiquity-os/gpt-command", + "name": "@ubiquity-os/command-ask", "version": "1.0.0", - "description": "GPT command", + "description": "A highly context aware organization integrated chatbot", "author": "Ubiquity OS", "license": "MIT", "main": "src/worker.ts", diff --git a/wrangler.toml b/wrangler.toml index 3a28184..f780a61 100644 --- a/wrangler.toml +++ b/wrangler.toml @@ -1,4 +1,4 @@ -name = "gpt-command" +name = "command-ask" main = "src/worker.ts" compatibility_date = "2024-05-23" node_compat = true From 48faffb95419a9c195921e46bf98bf978c6c42ec Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 26 Sep 2024 09:45:25 +0100 Subject: [PATCH 54/72] chore: camelCase and add config test --- .env.example | 1 - README.md | 2 +- src/handlers/ask-gpt.ts | 4 ---- src/plugin.ts | 4 ++-- src/types/plugin-inputs.ts | 2 +- tests/main.test.ts | 17 ++++++----------- 6 files changed, 10 insertions(+), 20 deletions(-) delete mode 100644 .env.example diff --git a/.env.example b/.env.example deleted file mode 100644 index b9e5cff..0000000 --- a/.env.example +++ /dev/null @@ -1 +0,0 @@ -OPENAI_API_KEY="MY_SECRET" diff --git a/README.md b/README.md index 99fd87f..ac69d4e 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ plugins: - plugin: http://localhost:4000 with: # Use your own app name for local testing - ubiquity_os_app_slug: "UbiquityOS" + ubiquityOsAppSlug: "UbiquityOS" ``` `.dev.vars` (for local testing): diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 15f77a2..fe3101e 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -20,10 +20,6 @@ export async function askGpt(context: Context, formattedChat: string) { env: { OPENAI_API_KEY }, } = context; - if (!OPENAI_API_KEY) { - throw logger.error(`No OpenAI API Key detected!`); - } - const openAi = new OpenAI({ apiKey: OPENAI_API_KEY }); const chat = createChatHistory(formattedChat); diff --git a/src/plugin.ts b/src/plugin.ts index 1c3c5e3..680245d 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -24,11 +24,11 @@ export async function plugin(inputs: PluginInputs, env: Env) { export async function runPlugin(context: Context) { const { logger, - config: { ubiquity_os_app_slug }, + config: { ubiquityOsAppSlug }, } = context; const question = context.payload.comment.body; - const slugRegex = new RegExp(`@${ubiquity_os_app_slug} `, "gi"); + const slugRegex = new RegExp(`@${ubiquityOsAppSlug} `, "gi"); if (!question.match(slugRegex)) { logger.info("Comment does not mention the app. 
Skipping."); diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index b04d5f1..78c05a3 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -20,7 +20,7 @@ export interface PluginInputs { expect(infoSpy).toHaveBeenCalledWith("Comment is empty. Skipping."); }); - - it("should not ask GPT a question if no OpenAI API key is provided", async () => { - const ctx = createContext(TEST_SLASH_COMMAND); - const errorSpy = jest.spyOn(ctx.logger, "error"); - - createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); - ctx.env.OPENAI_API_KEY = ""; - await runPlugin(ctx); - - expect(errorSpy).toHaveBeenNthCalledWith(1, "No OpenAI API Key detected!"); + it("Should throw if OPENAI_API_KEY is not defined", () => { + const settings = {}; + expect(() => Value.Decode(envSchema, settings)).toThrow(TransformDecodeCheckError); }); it("should construct the chat history correctly", async () => { @@ -305,7 +300,7 @@ function createContext(body = TEST_SLASH_COMMAND) { }, logger: new Logs("debug"), config: { - ubiquity_os_app_slug: "UbiquityOS", + ubiquityOsAppSlug: "UbiquityOS", }, env: { OPENAI_API_KEY: "test", From 5076ec80d2c428f85beef48de25ef6a6f4f7859f Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Thu, 26 Sep 2024 09:58:57 +0100 Subject: [PATCH 55/72] chore: hardcode bot name --- README.md | 6 ++++-- src/handlers/ask-gpt.ts | 5 +++-- src/plugin.ts | 4 ++-- src/types/env.ts | 1 + src/types/plugin-inputs.ts | 3 ++- tests/main.test.ts | 5 ++--- 6 files changed, 14 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index ac69d4e..e74704e 100644 --- a/README.md +++ b/README.md @@ -21,8 +21,8 @@ plugins: - uses: - plugin: http://localhost:4000 with: - # Use your own app name for local testing - ubiquityOsAppSlug: "UbiquityOS" + model: "" + openAiBaseUrl: "" ``` `.dev.vars` (for local testing): @@ -30,6 +30,8 @@ plugins: ```sh # OpenAI API key OPENAI_API_KEY=your-api-key +UBIQUITY_OS_APP_SLUG="UbiquityOS" + ``` ## Testing diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index fe3101e..f537da4 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -18,9 +18,10 @@ export async function askGpt(context: Context, formattedChat: string) { const { logger, env: { OPENAI_API_KEY }, + config: { model, openAiBaseUrl }, } = context; - const openAi = new OpenAI({ apiKey: OPENAI_API_KEY }); + const openAi = new OpenAI({ apiKey: OPENAI_API_KEY, baseURL: openAiBaseUrl }); const chat = createChatHistory(formattedChat); @@ -28,7 +29,7 @@ export async function askGpt(context: Context, formattedChat: string) { const res: OpenAI.Chat.Completions.ChatCompletion = await openAi.chat.completions.create({ messages: createChatHistory(formattedChat), - model: "chatgpt-4o-latest", + model: model ?? "o1-mini", }); const answer = res.choices[0].message.content; diff --git a/src/plugin.ts b/src/plugin.ts index 680245d..f89c894 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -24,11 +24,11 @@ export async function plugin(inputs: PluginInputs, env: Env) { export async function runPlugin(context: Context) { const { logger, - config: { ubiquityOsAppSlug }, + env: { UBIQUITY_OS_APP_SLUG }, } = context; const question = context.payload.comment.body; - const slugRegex = new RegExp(`@${ubiquityOsAppSlug} `, "gi"); + const slugRegex = new RegExp(`@${UBIQUITY_OS_APP_SLUG} `, "gi"); if (!question.match(slugRegex)) { logger.info("Comment does not mention the app. 
Skipping."); diff --git a/src/types/env.ts b/src/types/env.ts index eef600a..e8a5f7b 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -13,6 +13,7 @@ dotenv.config(); */ export const envSchema = T.Object({ OPENAI_API_KEY: T.String(), + UBIQUITY_OS_APP_SLUG: T.String(), }); export const envValidator = new StandardValidator(envSchema); diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 78c05a3..a574032 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -20,7 +20,8 @@ export interface PluginInputs Date: Thu, 26 Sep 2024 12:15:18 +0100 Subject: [PATCH 56/72] chore: remove t.optional and add baseUrl check --- src/handlers/ask-gpt.ts | 5 ++++- src/types/plugin-inputs.ts | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index f537da4..299cfeb 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -21,7 +21,10 @@ export async function askGpt(context: Context, formattedChat: string) { config: { model, openAiBaseUrl }, } = context; - const openAi = new OpenAI({ apiKey: OPENAI_API_KEY, baseURL: openAiBaseUrl }); + const openAi = new OpenAI({ + apiKey: OPENAI_API_KEY, + ...(openAiBaseUrl && { baseUrl: openAiBaseUrl }), + }); const chat = createChatHistory(formattedChat); diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index a574032..8bfb036 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -20,8 +20,8 @@ export interface PluginInputs Date: Thu, 26 Sep 2024 12:32:11 +0100 Subject: [PATCH 57/72] chore: fix typo --- src/handlers/ask-gpt.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 299cfeb..fbf9209 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -23,7 +23,7 @@ export async function askGpt(context: Context, formattedChat: string) { const openAi = new OpenAI({ apiKey: OPENAI_API_KEY, - ...(openAiBaseUrl && { baseUrl: openAiBaseUrl }), + ...(openAiBaseUrl && { baseURL: openAiBaseUrl }), }); const chat = createChatHistory(formattedChat); From 6b05fdb264990c358c21fb43882f0e35e76b015b Mon Sep 17 00:00:00 2001 From: Keyrxng <106303466+Keyrxng@users.noreply.github.com> Date: Mon, 30 Sep 2024 15:29:33 +0100 Subject: [PATCH 58/72] chore: optional endpoint, remove packageManager --- package.json | 5 ++--- src/types/plugin-inputs.ts | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index d172636..dd8fb89 100644 --- a/package.json +++ b/package.json @@ -78,6 +78,5 @@ "extends": [ "@commitlint/config-conventional" ] - }, - "packageManager": "yarn@1.22.22" -} + } +} \ No newline at end of file diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 8bfb036..8b9e071 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -21,7 +21,7 @@ export interface PluginInputs Date: Sat, 5 Oct 2024 16:32:55 -0400 Subject: [PATCH 59/72] fix: project setup and supabase setup --- .cspell.json | 3 +- package.json | 5 +- src/adapters/index.ts | 31 ++++ src/adapters/openai/helpers/completions.ts | 64 +++++++ src/adapters/openai/helpers/openai.ts | 11 ++ src/adapters/supabase/helpers/comment.ts | 39 +++++ src/adapters/supabase/helpers/issues.ts | 47 +++++ src/adapters/supabase/helpers/supabase.ts | 12 ++ src/adapters/voyage/helpers/embedding.ts | 25 +++ src/adapters/voyage/helpers/rerankers.ts | 23 +++ src/adapters/voyage/helpers/voyage.ts | 12 ++ src/handlers/ask-gpt.ts 
| 56 +++--- src/helpers/format-chat-history.ts | 6 +- src/plugin.ts | 28 +-- src/types/context.ts | 4 +- src/types/env.ts | 3 + src/types/plugin-inputs.ts | 1 + supabase/.gitignore | 4 + supabase/config.toml | 161 ++++++++++++++++++ .../20241005200943_comments_function.sql | 0 supabase/seed.sql | 0 yarn.lock | 78 ++++++++- 22 files changed, 557 insertions(+), 56 deletions(-) create mode 100644 src/adapters/index.ts create mode 100644 src/adapters/openai/helpers/completions.ts create mode 100644 src/adapters/openai/helpers/openai.ts create mode 100644 src/adapters/supabase/helpers/comment.ts create mode 100644 src/adapters/supabase/helpers/issues.ts create mode 100644 src/adapters/supabase/helpers/supabase.ts create mode 100644 src/adapters/voyage/helpers/embedding.ts create mode 100644 src/adapters/voyage/helpers/rerankers.ts create mode 100644 src/adapters/voyage/helpers/voyage.ts create mode 100644 supabase/.gitignore create mode 100644 supabase/config.toml create mode 100644 supabase/migrations/20241005200943_comments_function.sql create mode 100644 supabase/seed.sql diff --git a/.cspell.json b/.cspell.json index b201567..00226da 100644 --- a/.cspell.json +++ b/.cspell.json @@ -21,7 +21,8 @@ "mischeck", "convo", "ubqbot", - "behaviour" + "behaviour", + "voyageai", ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], diff --git a/package.json b/package.json index dd8fb89..ba6f618 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,8 @@ "@ubiquity-dao/ubiquibot-logger": "^1.3.0", "dotenv": "^16.4.5", "openai": "^4.63.0", - "typebox-validators": "0.3.5" + "typebox-validators": "0.3.5", + "voyageai": "^0.0.1-5" }, "devDependencies": { "@commitlint/cli": "19.3.0", @@ -79,4 +80,4 @@ "@commitlint/config-conventional" ] } -} \ No newline at end of file +} diff --git a/src/adapters/index.ts b/src/adapters/index.ts new file mode 100644 index 0000000..08f09dd --- /dev/null +++ b/src/adapters/index.ts @@ -0,0 +1,31 @@ +import { SupabaseClient } from "@supabase/supabase-js"; +import { Context } from "../types"; +import { Comment } from "./supabase/helpers/comment"; +import { SuperSupabase } from "./supabase/helpers/supabase"; +import { Embedding as VoyageEmbedding } from "./voyage/helpers/embedding"; +import { SuperVoyage } from "./voyage/helpers/voyage"; +import { VoyageAIClient } from "voyageai"; +import { Issues } from "./supabase/helpers/issues"; +import { SuperOpenAi } from "./openai/helpers/openai"; +import OpenAI from "openai"; +import { Completions } from "./openai/helpers/completions"; +import { Rerankers } from "./voyage/helpers/rerankers"; + +export function createAdapters(supabaseClient: SupabaseClient, voyage: VoyageAIClient, openai: OpenAI, context: Context) { + return { + supabase: { + comment: new Comment(supabaseClient, context), + issue: new Issues(supabaseClient, context), + super: new SuperSupabase(supabaseClient, context), + }, + voyage: { + reranker: new Rerankers(voyage, context), + embedding: new VoyageEmbedding(voyage, context), + super: new SuperVoyage(voyage, context), + }, + openai: { + completions: new Completions(openai, context), + super: new SuperOpenAi(openai, context), + }, + }; +} diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts new file mode 100644 index 0000000..32669ac --- /dev/null +++ b/src/adapters/openai/helpers/completions.ts @@ -0,0 +1,64 @@ +import OpenAI from "openai"; 
+import { Context } from "../../../types"; +import { SuperOpenAi } from "./openai"; +const MAX_TOKENS = 3072; + +export interface CompletionsType { + answer: string; + tokenUsage: { + input: number; + output: number; + total: number; + }; +} + +export class Completions extends SuperOpenAi { + protected context: Context; + + constructor(client: OpenAI, context: Context) { + super(client, context); + this.context = context; + } + + async createCompletion(prompt: string, model: string = "o1-mini", addlContext: string[]): Promise { + const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ + model: model, + messages: [ + { + role: "system", + content: [ + { + type: "text", + text: + "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." 
+ + "Context: " + + addlContext.join("\n"), + }, + ], + }, + { + role: "user", + content: [ + { + type: "text", + text: prompt, + }, + ], + }, + ], + temperature: 0, + max_tokens: MAX_TOKENS, + top_p: 1, + frequency_penalty: 0, + presence_penalty: 0, + response_format: { + type: "text", + }, + }); + const answer = res.choices[0].message; + if (answer && answer.content && res.usage) { + return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } }; + } + return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } }; + } +} diff --git a/src/adapters/openai/helpers/openai.ts b/src/adapters/openai/helpers/openai.ts new file mode 100644 index 0000000..11457c6 --- /dev/null +++ b/src/adapters/openai/helpers/openai.ts @@ -0,0 +1,11 @@ +import { OpenAI } from "openai"; +import { Context } from "../../../types/context"; + +export class SuperOpenAi { + protected client: OpenAI; + protected context: Context; + constructor(client: OpenAI, context: Context) { + this.client = client; + this.context = context; + } +} diff --git a/src/adapters/supabase/helpers/comment.ts b/src/adapters/supabase/helpers/comment.ts new file mode 100644 index 0000000..90dd8c2 --- /dev/null +++ b/src/adapters/supabase/helpers/comment.ts @@ -0,0 +1,39 @@ +import { SupabaseClient } from "@supabase/supabase-js"; +import { SuperSupabase } from "./supabase"; +import { Context } from "../../../types/context"; + +export interface CommentType { + id: string; + plaintext: string; + markdown?: string; + author_id: number; + created_at: string; + modified_at: string; + embedding: number[]; +} + +export class Comment extends SuperSupabase { + constructor(supabase: SupabaseClient, context: Context) { + super(supabase, context); + } + async getComment(commentNodeId: string): Promise { + const { data, error } = await this.supabase.from("issue_comments").select("*").eq("id", commentNodeId); + if (error) { + this.context.logger.error("Error getting comment", error); + } + return data; + } + + async findSimilarComments(query: string, threshold: number, currentId: string): Promise { + const embedding = await this.context.adapters.voyage.embedding.createEmbedding(query); + const { data, error } = await this.supabase.rpc("find_similar_comments_with_vector_search_ftse", { + current_id: currentId, + query_embedding: embedding, + threshold: threshold, + }); + if (error) { + this.context.logger.error("Error finding similar comments", error); + } + return data; + } +} diff --git a/src/adapters/supabase/helpers/issues.ts b/src/adapters/supabase/helpers/issues.ts new file mode 100644 index 0000000..d370838 --- /dev/null +++ b/src/adapters/supabase/helpers/issues.ts @@ -0,0 +1,47 @@ +import { SupabaseClient } from "@supabase/supabase-js"; +import { SuperSupabase } from "./supabase"; +import { Context } from "../../../types/context"; + +export interface IssueSimilaritySearchResult { + issue_id: string; + issue_plaintext: string; + similarity: number; +} + +export interface IssueType { + id: string; + markdown?: string; + plaintext?: string; + payload?: Record; + author_id: number; + created_at: string; + modified_at: string; + embedding: number[]; +} + +export class Issues extends SuperSupabase { + constructor(supabase: SupabaseClient, context: Context) { + super(supabase, context); + } + async getIssue(issueNodeId: string): Promise { + const { data, error } = await this.supabase.from("issues").select("*").eq("id", issueNodeId).returns(); + if (error) { + 
this.context.logger.error("Error getting issue", error); + return null; + } + return data; + } + async findSimilarIssues(plaintext: string, threshold: number, currentId: string): Promise { + const embedding = await this.context.adapters.voyage.embedding.createEmbedding(plaintext); + const { data, error } = await this.supabase.rpc("find_similar_issues_vector_search_ftse", { + current_id: currentId, + query_embedding: embedding, + threshold: threshold, + }); + if (error) { + this.context.logger.error("Error finding similar issues", error); + return []; + } + return data; + } +} diff --git a/src/adapters/supabase/helpers/supabase.ts b/src/adapters/supabase/helpers/supabase.ts new file mode 100644 index 0000000..34e845c --- /dev/null +++ b/src/adapters/supabase/helpers/supabase.ts @@ -0,0 +1,12 @@ +import { SupabaseClient } from "@supabase/supabase-js"; +import { Context } from "../../../types/context"; + +export class SuperSupabase { + protected supabase: SupabaseClient; + protected context: Context; + + constructor(supabase: SupabaseClient, context: Context) { + this.supabase = supabase; + this.context = context; + } +} diff --git a/src/adapters/voyage/helpers/embedding.ts b/src/adapters/voyage/helpers/embedding.ts new file mode 100644 index 0000000..9943882 --- /dev/null +++ b/src/adapters/voyage/helpers/embedding.ts @@ -0,0 +1,25 @@ +import { VoyageAIClient } from "voyageai"; +import { Context } from "../../../types"; +import { SuperVoyage } from "./voyage"; +const VECTOR_SIZE = 1024; + +export class Embedding extends SuperVoyage { + protected context: Context; + + constructor(client: VoyageAIClient, context: Context) { + super(client, context); + this.context = context; + } + + async createEmbedding(text: string | null): Promise { + if (text === null) { + return new Array(VECTOR_SIZE).fill(0); + } else { + const response = await this.client.embed({ + input: text, + model: "voyage-large-3", + }); + return (response.data && response.data[0]?.embedding) || []; + } + } +} diff --git a/src/adapters/voyage/helpers/rerankers.ts b/src/adapters/voyage/helpers/rerankers.ts new file mode 100644 index 0000000..08fadae --- /dev/null +++ b/src/adapters/voyage/helpers/rerankers.ts @@ -0,0 +1,23 @@ +import { VoyageAIClient } from "voyageai"; +import { Context } from "../../../types"; +import { SuperVoyage } from "./voyage"; + +export class Rerankers extends SuperVoyage { + protected context: Context; + + constructor(client: VoyageAIClient, context: Context) { + super(client, context); + this.context = context; + } + + async reRankResults(results: string[], query: string): Promise { + const response = await this.client.rerank({ + query, + documents: results, + model: "voyage-large-3", + returnDocuments: true, + }); + const rerankedResults = response.data || []; + return rerankedResults.map((result) => result.document).filter((document): document is string => document !== undefined); + } +} diff --git a/src/adapters/voyage/helpers/voyage.ts b/src/adapters/voyage/helpers/voyage.ts new file mode 100644 index 0000000..c08c0af --- /dev/null +++ b/src/adapters/voyage/helpers/voyage.ts @@ -0,0 +1,12 @@ +import { VoyageAIClient } from "voyageai"; +import { Context } from "../../../types/context"; + +export class SuperVoyage { + protected client: VoyageAIClient; + protected context: Context; + + constructor(client: VoyageAIClient, context: Context) { + this.client = client; + this.context = context; + } +} diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index fbf9209..5dc83bc 100644 --- 
a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,47 +1,31 @@ -import OpenAI from "openai"; import { Context } from "../types"; -import { createChatHistory, formatChatHistory } from "../helpers/format-chat-history"; -import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; +import { CompletionsType } from "../adapters/openai/helpers/completions"; +import { CommentType } from "../adapters/supabase/helpers/comment"; export async function askQuestion(context: Context, question: string) { if (!question) { throw context.logger.error(`No question provided`); } - - const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); - - const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - return await askGpt(context, formattedChat); + //TODO: Temporary workaround + //const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); + //const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); + return await askGpt(context, question); } -export async function askGpt(context: Context, formattedChat: string) { +export async function askGpt(context: Context, question: string): Promise { const { - logger, - env: { OPENAI_API_KEY }, - config: { model, openAiBaseUrl }, + config: { model, similarityThreshold }, } = context; - - const openAi = new OpenAI({ - apiKey: OPENAI_API_KEY, - ...(openAiBaseUrl && { baseURL: openAiBaseUrl }), - }); - - const chat = createChatHistory(formattedChat); - - logger.info(`Sending chat to OpenAI`, { chat }); - - const res: OpenAI.Chat.Completions.ChatCompletion = await openAi.chat.completions.create({ - messages: createChatHistory(formattedChat), - model: model ?? "o1-mini", - }); - - const answer = res.choices[0].message.content; - - const tokenUsage = { - output: res.usage?.completion_tokens, - input: res.usage?.prompt_tokens, - total: res.usage?.total_tokens, - }; - - return { answer, tokenUsage }; + //Fetch Similar Text + const similarComments = (await context.adapters.supabase.comment.findSimilarComments(question, similarityThreshold, "")) || []; + const similarIssues = (await context.adapters.supabase.issue.findSimilarIssues(question, similarityThreshold, "")) || []; + //Create a new object with plain text from both the objects + const similarText = similarComments.map((comment: CommentType) => comment.plaintext); + similarText.push(...similarIssues.map((issue) => issue.issue_plaintext)); + //Rerank Similar Comments and Issues + const rerankedText = await context.adapters.voyage.reranker.reRankResults(similarText, question); + //TODO: Temporary workaround + //const chat = createChatHistory(formattedChat); + //logger.info(`Sending chat to OpenAI`, { chat }); + return context.adapters.openai.completions.createCompletion(question, model, rerankedText); } diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 5d369fe..5d03dd7 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -145,9 +145,9 @@ export function createChatHistory(formattedChat: string) { const systemMessage: ChatCompletionMessageParam = { role: "system", content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. -Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. 
-The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. -Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, + Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. + The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. + Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, }; const userMessage: ChatCompletionMessageParam = { diff --git a/src/plugin.ts b/src/plugin.ts index f89c894..ab0de20 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -3,21 +3,33 @@ import { PluginInputs } from "./types"; import { Context } from "./types"; import { askQuestion } from "./handlers/ask-gpt"; import { addCommentToIssue } from "./handlers/add-comment"; -import { LogReturn, Logs } from "@ubiquity-dao/ubiquibot-logger"; +import { LogLevel, LogReturn, Logs } from "@ubiquity-dao/ubiquibot-logger"; import { Env } from "./types/env"; +import { createAdapters } from "./adapters"; +import { createClient } from "@supabase/supabase-js"; +import { VoyageAIClient } from "voyageai"; +import OpenAI from "openai"; export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); - + const supabase = createClient(env.SUPABASE_URL, env.SUPABASE_KEY); + const voyageClient = new VoyageAIClient({ + apiKey: env.VOYAGEAI_API_KEY, + }); + const openaiClient = new OpenAI({ + apiKey: env.OPENAI_API_KEY, + baseURL: inputs.settings.openAiBaseUrl || "https://api.openai.com", + }); const context: Context = { eventName: inputs.eventName, payload: inputs.eventPayload, config: inputs.settings, octokit, env, - logger: new Logs("debug"), + logger: new Logs("info" as LogLevel), + adapters: {} as ReturnType, }; - + context.adapters = createAdapters(supabase, voyageClient, openaiClient, context); return runPlugin(context); } @@ -27,27 +39,21 @@ export async function runPlugin(context: Context) { env: { UBIQUITY_OS_APP_SLUG }, } = context; const question = context.payload.comment.body; - const slugRegex = new RegExp(`@${UBIQUITY_OS_APP_SLUG} `, "gi"); - if (!question.match(slugRegex)) { logger.info("Comment does not mention the app. Skipping."); return; } - if (context.payload.comment.user?.type === "Bot") { logger.info("Comment is from a bot. Skipping."); return; } - if (question.replace(slugRegex, "").trim().length === 0) { logger.info("Comment is empty. 
Skipping."); return; } - logger.info(`Asking question: ${question}`); - let commentToPost = ""; - + let commentToPost; try { const response = await askQuestion(context, question); const { answer, tokenUsage } = response; diff --git a/src/types/context.ts b/src/types/context.ts index d5f7113..73f74b7 100644 --- a/src/types/context.ts +++ b/src/types/context.ts @@ -3,6 +3,7 @@ import { EmitterWebhookEvent as WebhookEvent, EmitterWebhookEventName as Webhook import { PluginSettings } from "./plugin-inputs"; import { Logs } from "@ubiquity-dao/ubiquibot-logger"; import { Env } from "./env"; +import { createAdapters } from "../adapters"; export type SupportedEventsU = "issue_comment.created"; @@ -15,6 +16,7 @@ export interface Context; config: PluginSettings; - logger: Logs; env: Env; + logger: Logs; + adapters: ReturnType; } diff --git a/src/types/env.ts b/src/types/env.ts index e8a5f7b..a2d9343 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -14,6 +14,9 @@ dotenv.config(); export const envSchema = T.Object({ OPENAI_API_KEY: T.String(), UBIQUITY_OS_APP_SLUG: T.String(), + VOYAGEAI_API_KEY: T.String(), + SUPABASE_URL: T.String(), + SUPABASE_KEY: T.String(), }); export const envValidator = new StandardValidator(envSchema); diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 8b9e071..2c0546d 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -22,6 +22,7 @@ export interface PluginInputs.s3-.amazonaws.com +s3_host = "env(S3_HOST)" +# Configures S3 bucket region, eg. us-east-1 +s3_region = "env(S3_REGION)" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "env(S3_ACCESS_KEY)" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/supabase/migrations/20241005200943_comments_function.sql b/supabase/migrations/20241005200943_comments_function.sql new file mode 100644 index 0000000..e69de29 diff --git a/supabase/seed.sql b/supabase/seed.sql new file mode 100644 index 0000000..e69de29 diff --git a/yarn.lock b/yarn.lock index 18a112c..dc9255b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2176,6 +2176,11 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + before-after-hook@^2.2.0: version "2.2.3" resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" @@ -2242,6 +2247,14 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.2.1" + call-bind@^1.0.2, call-bind@^1.0.5, call-bind@^1.0.6, call-bind@^1.0.7: version "1.0.7" resolved 
"https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" @@ -3185,6 +3198,11 @@ eventemitter3@^5.0.1: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== +events@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" @@ -3377,6 +3395,11 @@ formdata-node@^4.3.2: node-domexception "1.0.0" web-streams-polyfill "4.0.0-beta.3" +formdata-node@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-6.0.3.tgz#48f8e2206ae2befded82af621ef015f08168dc6d" + integrity sha512-8e1++BCiTzUno9v5IZ2J6bv4RU+3UKDmqWUQD0MIMVCd9AdhWkO1gw57oo1mNEX1dMq2EGI+FbWz4B92pscSQg== + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -3672,6 +3695,11 @@ identity-function@^1.0.0: resolved "https://registry.yarnpkg.com/identity-function/-/identity-function-1.0.0.tgz#bea1159f0985239be3ca348edf40ce2f0dd2c21d" integrity sha512-kNrgUK0qI+9qLTBidsH85HjDLpZfrrS0ElquKKe/fJFdB3D7VeKdXXEvOPDUHSHOzdZKCAAaQIWWyp0l2yq6pw== +ieee754@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + ignore@^5.1.4, ignore@^5.1.8, ignore@^5.2.0, ignore@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" @@ -4412,6 +4440,11 @@ jiti@^1.21.0: resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.6.tgz#6c7f7398dd4b3142767f9a168af2f317a428d268" integrity sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w== +js-base64@3.7.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-3.7.2.tgz#816d11d81a8aff241603d19ce5761e13e41d7745" + integrity sha512-NnRs6dsyqUXejqk/yv2aiXlAvOs56sLkX6nUdeaNezI5LFFLlsZjOThmwnrcwh5ZZRwZlCMnVAY3CvhIhoVEKQ== + js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" @@ -4898,7 +4931,7 @@ node-fetch-native@^1.6.4: resolved "https://registry.yarnpkg.com/node-fetch-native/-/node-fetch-native-1.6.4.tgz#679fc8fd8111266d47d7e72c379f1bed9acff06e" integrity sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ== -node-fetch@^2.6.7: +node-fetch@2.7.0, node-fetch@^2.6.7: version "2.7.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== @@ -5293,6 +5326,11 @@ printable-characters@^1.0.42: resolved "https://registry.yarnpkg.com/printable-characters/-/printable-characters-1.0.42.tgz#3f18e977a9bd8eb37fcc4ff5659d7be90868b3d8" integrity sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ== 
+process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== + prompts@^2.0.1: version "2.4.2" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" @@ -5311,6 +5349,13 @@ pure-rand@^6.0.0: resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.1.0.tgz#d173cf23258231976ccbdb05247c9787957604f2" integrity sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA== +qs@6.11.2: + version "6.11.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.2.tgz#64bea51f12c1f5da1bc01496f48ffcff7c69d7d9" + integrity sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA== + dependencies: + side-channel "^1.0.4" + queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -5339,6 +5384,17 @@ readable-stream@^3.4.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^4.5.2: + version "4.5.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.5.2.tgz#9e7fc4c45099baeed934bff6eb97ba6cf2729e09" + integrity sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g== + dependencies: + abort-controller "^3.0.0" + buffer "^6.0.3" + events "^3.3.0" + process "^0.11.10" + string_decoder "^1.3.0" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -5794,7 +5850,7 @@ string.prototype.trimstart@^1.0.8: define-properties "^1.2.1" es-object-atoms "^1.0.0" -string_decoder@^1.1.1: +string_decoder@^1.1.1, string_decoder@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== @@ -6138,6 +6194,11 @@ uri-js@^4.2.2, uri-js@^4.4.1: dependencies: punycode "^2.1.0" +url-join@4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7" + integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA== + util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -6170,6 +6231,19 @@ vlq@^0.2.1: resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow== +voyageai@^0.0.1-5: + version "0.0.1-5" + resolved "https://registry.yarnpkg.com/voyageai/-/voyageai-0.0.1-5.tgz#e0457d991784900c16e4cdf095654f195d62fdf2" + integrity sha512-IuXSXM3l9J3NIq+MLHXacG/yhswpEgWIu9eBqoFqMRnFiDx00dLL62OWg6WqVSipddZLwFeWH1Kaj56x5eqhOQ== + dependencies: + form-data "^4.0.0" + formdata-node "^6.0.3" + js-base64 "3.7.2" + node-fetch "2.7.0" + qs "6.11.2" + readable-stream "^4.5.2" + url-join "4.0.1" + vscode-languageserver-textdocument@^1.0.11: version "1.0.11" resolved 
"https://registry.yarnpkg.com/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.11.tgz#0822a000e7d4dc083312580d7575fe9e3ba2e2bf" From 51454d4338b07647b2bb5184f57e824a6a20f6a1 Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Sat, 5 Oct 2024 18:14:23 -0400 Subject: [PATCH 60/72] fix: tests --- src/adapters/index.ts | 4 +- .../supabase/helpers/{issues.ts => issue.ts} | 2 +- tests/main.test.ts | 162 ++++++++++++++++-- 3 files changed, 152 insertions(+), 16 deletions(-) rename src/adapters/supabase/helpers/{issues.ts => issue.ts} (97%) diff --git a/src/adapters/index.ts b/src/adapters/index.ts index 08f09dd..c72c5f8 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -5,7 +5,7 @@ import { SuperSupabase } from "./supabase/helpers/supabase"; import { Embedding as VoyageEmbedding } from "./voyage/helpers/embedding"; import { SuperVoyage } from "./voyage/helpers/voyage"; import { VoyageAIClient } from "voyageai"; -import { Issues } from "./supabase/helpers/issues"; +import { Issue } from "./supabase/helpers/issue"; import { SuperOpenAi } from "./openai/helpers/openai"; import OpenAI from "openai"; import { Completions } from "./openai/helpers/completions"; @@ -15,7 +15,7 @@ export function createAdapters(supabaseClient: SupabaseClient, voyage: VoyageAIC return { supabase: { comment: new Comment(supabaseClient, context), - issue: new Issues(supabaseClient, context), + issue: new Issue(supabaseClient, context), super: new SuperSupabase(supabaseClient, context), }, voyage: { diff --git a/src/adapters/supabase/helpers/issues.ts b/src/adapters/supabase/helpers/issue.ts similarity index 97% rename from src/adapters/supabase/helpers/issues.ts rename to src/adapters/supabase/helpers/issue.ts index d370838..bee8223 100644 --- a/src/adapters/supabase/helpers/issues.ts +++ b/src/adapters/supabase/helpers/issue.ts @@ -19,7 +19,7 @@ export interface IssueType { embedding: number[]; } -export class Issues extends SuperSupabase { +export class Issue extends SuperSupabase { constructor(supabase: SupabaseClient, context: Context) { super(supabase, context); } diff --git a/tests/main.test.ts b/tests/main.test.ts index 1b2069c..7720ab8 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -11,10 +11,13 @@ import { askQuestion } from "../src/handlers/ask-gpt"; import { runPlugin } from "../src/plugin"; import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; import { envSchema } from "../src/types/env"; +import { CompletionsType } from "../src/adapters/openai/helpers/completions"; const TEST_QUESTION = "what is pi?"; const TEST_SLASH_COMMAND = "@UbiquityOS what is pi?"; const LOG_CALLER = "_Logs."; +const ISSUE_ID_2_CONTENT = "More context here #2"; +const ISSUE_ID_3_CONTENT = "More context here #3"; const systemMsg = `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. 
@@ -58,6 +61,7 @@ describe("Ask plugin tests", () => { it("should ask GPT a question", async () => { const ctx = createContext(TEST_SLASH_COMMAND); + console.log(ctx.adapters); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); const res = await askQuestion(ctx, TEST_QUESTION); @@ -112,16 +116,16 @@ describe("Ask plugin tests", () => { const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === -This is a demo spec for a demo task just perfect for testing. -=== End Current Issue #1 Specification === + This is a demo spec for a demo task just perfect for testing. + === End Current Issue #1 Specification === -=== Current Issue #1 Conversation === ubiquity/test-repo #1 === + === Current Issue #1 Conversation === ubiquity/test-repo #1 === -1 ubiquity: what is pi? -=== End Current Issue #1 Conversation ===\n -`; + 1 ubiquity: ${TEST_QUESTION} + === End Current Issue #1 Conversation ===\n + `; - expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: @UbiquityOS what is pi?"); + expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { caller: LOG_CALLER, chat: [ @@ -150,9 +154,9 @@ This is a demo spec for a demo task just perfect for testing. const ctx = createContext(TEST_SLASH_COMMAND); const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([ - transformCommentTemplate(1, 1, "More context here #2", "ubiquity", "test-repo", true), + transformCommentTemplate(1, 1, ISSUE_ID_2_CONTENT, "ubiquity", "test-repo", true), transformCommentTemplate(2, 1, TEST_QUESTION, "ubiquity", "test-repo", true), - transformCommentTemplate(3, 2, "More context here #3", "ubiquity", "test-repo", true), + transformCommentTemplate(3, 2, ISSUE_ID_3_CONTENT, "ubiquity", "test-repo", true), transformCommentTemplate(4, 3, "Just a comment", "ubiquity", "test-repo", true), ]); @@ -160,7 +164,7 @@ This is a demo spec for a demo task just perfect for testing. expect(infoSpy).toHaveBeenCalledTimes(3); - expect(infoSpy).toHaveBeenNthCalledWith(1, "Asking question: @UbiquityOS what is pi?"); + expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === @@ -169,8 +173,8 @@ This is a demo spec for a demo task just perfect for testing. === Current Issue #1 Conversation === ubiquity/test-repo #1 === -1 ubiquity: More context here #2 -2 ubiquity: what is pi? 
+1 ubiquity: ${ISSUE_ID_2_CONTENT} +2 ubiquity: ${TEST_QUESTION} === End Current Issue #1 Conversation === === Linked Issue #2 Specification === ubiquity/test-repo/2 === @@ -180,7 +184,7 @@ Related to issue #3 === Linked Issue #2 Conversation === ubiquity/test-repo #2 === -3 ubiquity: More context here #3 +3 ubiquity: ${ISSUE_ID_3_CONTENT} === End Linked Issue #2 Conversation === === Linked Issue #3 Specification === ubiquity/test-repo/3 === @@ -304,6 +308,138 @@ function createContext(body = TEST_SLASH_COMMAND) { UBIQUITY_OS_APP_SLUG: "UbiquityOS", OPENAI_API_KEY: "test", }, + adapters: { + supabase: { + issue: { + getIssue: async () => { + return [ + { + id: "1", + markdown: "This is a demo spec for a demo task just perfect for testing.", + plaintext: "This is a demo spec for a demo task just perfect for testing.", + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + ]; + }, + findSimilarIssues: async () => { + return [ + { + issue_id: "2", + issue_plaintext: "Related to issue #3", + similarity: 0.5, + }, + { + issue_id: "3", + issue_plaintext: "Someother issue", + similarity: 0.3, + }, + ]; + }, + }, + comment: { + getComments: async () => { + return [ + { + id: "1", + plaintext: TEST_QUESTION, + markdown: TEST_QUESTION, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "2", + plaintext: ISSUE_ID_2_CONTENT, + markdown: ISSUE_ID_2_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "3", + plaintext: ISSUE_ID_3_CONTENT, + markdown: ISSUE_ID_3_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "4", + plaintext: "Something new", + markdown: "Something new", + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + ]; + }, + findSimilarComments: async () => { + return [ + { + id: "2", + plaintext: ISSUE_ID_2_CONTENT, + markdown: ISSUE_ID_2_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "3", + plaintext: ISSUE_ID_3_CONTENT, + markdown: ISSUE_ID_3_CONTENT, + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + { + id: "4", + plaintext: "New Comment", + markdown: "New Comment", + author_id: 1, + created_at: new Date().toISOString(), + modified_at: new Date().toISOString(), + embedding: [1, 2, 3], + }, + ]; + }, + }, + }, + voyage: { + embedding: { + createEmbedding: async () => { + return new Array(1024).fill(0); + }, + }, + reranker: { + reRankResults: async (similarText: string[]) => { + return similarText; + }, + }, + }, + openai: { + completions: { + createCompletion: async (): Promise => { + return { + answer: "This is a mock answer for the chat", + tokenUsage: { + input: 1000, + output: 150, + total: 1150, + }, + }; + }, + }, + }, + }, octokit: new octokit.Octokit(), eventName: "issue_comment.created" as SupportedEventsU, } as unknown as Context; From 0f8201500efa88dd16424341bbd9e935877da823 Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Sat, 5 Oct 2024 22:07:10 -0400 Subject: [PATCH 61/72] feat: basic chat rag works --- .dev.vars.example | 5 +- package.json | 2 +- src/adapters/index.ts | 2 +- 
src/adapters/openai/helpers/completions.ts | 4 +- src/adapters/supabase/helpers/comment.ts | 14 ++- .../supabase/helpers/{issue.ts => issues.ts} | 5 +- src/adapters/voyage/helpers/embedding.ts | 2 +- src/adapters/voyage/helpers/rerankers.ts | 3 +- src/handlers/ask-gpt.ts | 18 ++- src/plugin.ts | 3 +- src/types/plugin-inputs.ts | 2 +- .../20241005200943_comments_function.sql | 119 ++++++++++++++++++ tests/main.test.ts | 1 - 13 files changed, 164 insertions(+), 16 deletions(-) rename src/adapters/supabase/helpers/{issue.ts => issues.ts} (93%) diff --git a/.dev.vars.example b/.dev.vars.example index b9e5cff..e39f3dd 100644 --- a/.dev.vars.example +++ b/.dev.vars.example @@ -1 +1,4 @@ -OPENAI_API_KEY="MY_SECRET" +OPENAI_API_KEY="" +SUPABASE_URL="" +SUPABASE_KEY="" +VOYAGEAI_API_KEY="" \ No newline at end of file diff --git a/package.json b/package.json index ba6f618..c6bb216 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,7 @@ "knip-ci": "knip --no-exit-code --reporter json --config .github/knip.ts", "prepare": "husky install", "test": "jest --setupFiles dotenv/config --coverage", - "worker": "wrangler dev --env dev --port 4000" + "worker": "wrangler dev --env dev --port 5000" }, "keywords": [ "typescript", diff --git a/src/adapters/index.ts b/src/adapters/index.ts index c72c5f8..5040ebd 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -5,7 +5,7 @@ import { SuperSupabase } from "./supabase/helpers/supabase"; import { Embedding as VoyageEmbedding } from "./voyage/helpers/embedding"; import { SuperVoyage } from "./voyage/helpers/voyage"; import { VoyageAIClient } from "voyageai"; -import { Issue } from "./supabase/helpers/issue"; +import { Issue } from "./supabase/helpers/issues"; import { SuperOpenAi } from "./openai/helpers/openai"; import OpenAI from "openai"; import { Completions } from "./openai/helpers/completions"; diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 32669ac..83636ad 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -1,7 +1,7 @@ import OpenAI from "openai"; import { Context } from "../../../types"; import { SuperOpenAi } from "./openai"; -const MAX_TOKENS = 3072; +const MAX_TOKENS = 3000; export interface CompletionsType { answer: string; @@ -46,7 +46,7 @@ export class Completions extends SuperOpenAi { ], }, ], - temperature: 0, + temperature: 0.2, max_tokens: MAX_TOKENS, top_p: 1, frequency_penalty: 0, diff --git a/src/adapters/supabase/helpers/comment.ts b/src/adapters/supabase/helpers/comment.ts index 90dd8c2..e6aff31 100644 --- a/src/adapters/supabase/helpers/comment.ts +++ b/src/adapters/supabase/helpers/comment.ts @@ -12,6 +12,14 @@ export interface CommentType { embedding: number[]; } +export interface CommentSimilaritySearchResult { + comment_id: string; + comment_plaintext: string; + comment_issue_id: string; + similarity: number; + text_similarity: number; +} + export class Comment extends SuperSupabase { constructor(supabase: SupabaseClient, context: Context) { super(supabase, context); @@ -24,12 +32,14 @@ export class Comment extends SuperSupabase { return data; } - async findSimilarComments(query: string, threshold: number, currentId: string): Promise { + async findSimilarComments(query: string, threshold: number, currentId: string): Promise { const embedding = await this.context.adapters.voyage.embedding.createEmbedding(query); - const { data, error } = await 
this.supabase.rpc("find_similar_comments_with_vector_search_ftse", { + const { data, error } = await this.supabase.rpc("find_similar_comments", { current_id: currentId, + query_text: query, query_embedding: embedding, threshold: threshold, + max_results: 10, }); if (error) { this.context.logger.error("Error finding similar comments", error); diff --git a/src/adapters/supabase/helpers/issue.ts b/src/adapters/supabase/helpers/issues.ts similarity index 93% rename from src/adapters/supabase/helpers/issue.ts rename to src/adapters/supabase/helpers/issues.ts index bee8223..8bd083e 100644 --- a/src/adapters/supabase/helpers/issue.ts +++ b/src/adapters/supabase/helpers/issues.ts @@ -6,6 +6,7 @@ export interface IssueSimilaritySearchResult { issue_id: string; issue_plaintext: string; similarity: number; + text_similarity: number; } export interface IssueType { @@ -33,10 +34,12 @@ export class Issue extends SuperSupabase { } async findSimilarIssues(plaintext: string, threshold: number, currentId: string): Promise { const embedding = await this.context.adapters.voyage.embedding.createEmbedding(plaintext); - const { data, error } = await this.supabase.rpc("find_similar_issues_vector_search_ftse", { + const { data, error } = await this.supabase.rpc("find_similar_issue_ftse", { current_id: currentId, + query_text: plaintext, query_embedding: embedding, threshold: threshold, + max_results: 10, }); if (error) { this.context.logger.error("Error finding similar issues", error); diff --git a/src/adapters/voyage/helpers/embedding.ts b/src/adapters/voyage/helpers/embedding.ts index 9943882..575543e 100644 --- a/src/adapters/voyage/helpers/embedding.ts +++ b/src/adapters/voyage/helpers/embedding.ts @@ -17,7 +17,7 @@ export class Embedding extends SuperVoyage { } else { const response = await this.client.embed({ input: text, - model: "voyage-large-3", + model: "voyage-large-2-instruct", }); return (response.data && response.data[0]?.embedding) || []; } diff --git a/src/adapters/voyage/helpers/rerankers.ts b/src/adapters/voyage/helpers/rerankers.ts index 08fadae..734063d 100644 --- a/src/adapters/voyage/helpers/rerankers.ts +++ b/src/adapters/voyage/helpers/rerankers.ts @@ -14,8 +14,9 @@ export class Rerankers extends SuperVoyage { const response = await this.client.rerank({ query, documents: results, - model: "voyage-large-3", + model: "rerank-2", returnDocuments: true, + topK: 5, }); const rerankedResults = response.data || []; return rerankedResults.map((result) => result.document).filter((document): document is string => document !== undefined); diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 5dc83bc..90e5222 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -1,6 +1,7 @@ import { Context } from "../types"; import { CompletionsType } from "../adapters/openai/helpers/completions"; -import { CommentType } from "../adapters/supabase/helpers/comment"; +import { CommentSimilaritySearchResult } from "../adapters/supabase/helpers/comment"; +import { IssueSimilaritySearchResult } from "../adapters/supabase/helpers/issues"; export async function askQuestion(context: Context, question: string) { if (!question) { @@ -20,12 +21,23 @@ export async function askGpt(context: Context, question: string): Promise comment.plaintext); - similarText.push(...similarIssues.map((issue) => issue.issue_plaintext)); + const similarText = similarComments.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext); + similarText.push(...similarIssues.map((issue: 
IssueSimilaritySearchResult) => issue.issue_plaintext)); //Rerank Similar Comments and Issues const rerankedText = await context.adapters.voyage.reranker.reRankResults(similarText, question); + //Remove unwanted characters from the text + rerankedText.forEach((text) => removeUnwantedChars(text)); //TODO: Temporary workaround //const chat = createChatHistory(formattedChat); //logger.info(`Sending chat to OpenAI`, { chat }); return context.adapters.openai.completions.createCompletion(question, model, rerankedText); } + +/** + * Removes unwanted characters from the text like emojis, special characters etc. + * @param text + * @returns + */ +const removeUnwantedChars = (text: string) => { + return text.replace(/[^a-zA-Z0-9\s]/g, ""); +}; diff --git a/src/plugin.ts b/src/plugin.ts index ab0de20..33f441d 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -16,9 +16,10 @@ export async function plugin(inputs: PluginInputs, env: Env) { const voyageClient = new VoyageAIClient({ apiKey: env.VOYAGEAI_API_KEY, }); + const openaiClient = new OpenAI({ apiKey: env.OPENAI_API_KEY, - baseURL: inputs.settings.openAiBaseUrl || "https://api.openai.com", + ...(inputs.settings.openAiBaseUrl && { baseUrl: inputs.settings.openAiBaseUrl }), }); const context: Context = { eventName: inputs.eventName, diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 2c0546d..694bb50 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -22,7 +22,7 @@ export interface PluginInputs 2; + + -- Create tsquery from tokens + SELECT to_tsquery(string_agg(lexeme || ':*', ' | ')) + INTO query_tsquery + FROM unnest(query_tokens) lexeme; + + RETURN QUERY + WITH vector_similarity AS ( + SELECT + id, + plaintext, + (1 - (embedding <-> query_embedding))::DOUBLE PRECISION AS vec_similarity + FROM issues + WHERE id <> current_id + AND (1 - (embedding <-> query_embedding))::DOUBLE PRECISION > threshold + ), + text_similarity AS ( + SELECT + id, + plaintext, + ts_rank(to_tsvector('english', plaintext), query_tsquery)::DOUBLE PRECISION AS text_sim + FROM issues + WHERE to_tsvector('english', plaintext) @@ query_tsquery + ) + SELECT + vs.id AS issue_id, + vs.plaintext AS issue_plaintext, + vs.vec_similarity AS similarity, + COALESCE(ts.text_sim, 0::DOUBLE PRECISION) AS text_similarity + FROM vector_similarity vs + LEFT JOIN text_similarity ts ON vs.id = ts.id + ORDER BY (vs.vec_similarity + COALESCE(ts.text_sim, 0::DOUBLE PRECISION)) DESC + LIMIT max_results; +END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION find_similar_comments( + current_id VARCHAR, + query_text TEXT, + query_embedding VECTOR(1024), + threshold DOUBLE PRECISION, + max_results INTEGER DEFAULT 10 +) +RETURNS TABLE( + comment_id VARCHAR, + comment_plaintext TEXT, + comment_issue_id VARCHAR, + similarity DOUBLE PRECISION, + text_similarity DOUBLE PRECISION +) AS $$ +DECLARE + query_tokens TEXT[]; + query_tsquery TSQUERY; +BEGIN + -- Generate query tokens + SELECT array_agg(DISTINCT lower(word)) + INTO query_tokens + FROM unnest(regexp_split_to_array(query_text, '\s+')) AS word + WHERE length(word) > 2; + + -- Create tsquery from tokens + SELECT to_tsquery(string_agg(lexeme || ':*', ' | ')) + INTO query_tsquery + FROM unnest(query_tokens) lexeme; + + RETURN QUERY + WITH vector_similarity AS ( + SELECT + id, + plaintext, + issue_id, + 1 - (l2_distance(query_embedding, embedding))::DOUBLE PRECISION AS vec_similarity + FROM issue_comments + WHERE id <> current_id + AND 1 - (l2_distance(query_embedding, embedding))::DOUBLE PRECISION > 
threshold + ), + text_similarity AS ( + SELECT + id, + plaintext, + issue_id, + ts_rank(to_tsvector('english', plaintext), query_tsquery)::DOUBLE PRECISION AS text_sim + FROM issue_comments + WHERE to_tsvector('english', plaintext) @@ query_tsquery + ) + SELECT + vs.id AS comment_id, + vs.plaintext AS comment_plaintext, + vs.issue_id AS comment_issue_id, + vs.vec_similarity AS similarity, + COALESCE(ts.text_sim, 0::DOUBLE PRECISION) AS text_similarity + FROM vector_similarity vs + LEFT JOIN text_similarity ts ON vs.id = ts.id + ORDER BY (vs.vec_similarity + COALESCE(ts.text_sim, 0::DOUBLE PRECISION)) DESC + LIMIT max_results; +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/tests/main.test.ts b/tests/main.test.ts index 7720ab8..067158e 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -61,7 +61,6 @@ describe("Ask plugin tests", () => { it("should ask GPT a question", async () => { const ctx = createContext(TEST_SLASH_COMMAND); - console.log(ctx.adapters); createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]); const res = await askQuestion(ctx, TEST_QUESTION); From fa679485a36be1545492e88f0d85e2b22d4ee69c Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Sat, 5 Oct 2024 23:04:30 -0400 Subject: [PATCH 62/72] fix: cspell --- .cspell.json | 7 ++++++- src/adapters/openai/helpers/completions.ts | 4 ++-- src/plugin.ts | 8 ++++---- tests/main.test.ts | 2 +- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/.cspell.json b/.cspell.json index 00226da..deb1bde 100644 --- a/.cspell.json +++ b/.cspell.json @@ -1,7 +1,7 @@ { "$schema": "https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json", "version": "0.2", - "ignorePaths": ["**/*.json", "**/*.css", "node_modules", "**/*.log", "./src/adapters/supabase/**/**.ts"], + "ignorePaths": ["**/*.json", "**/*.css", "node_modules", "**/*.log", "./src/adapters/supabase/**/**.ts", "/supabase/*"], "useGitignore": true, "language": "en", "words": [ @@ -23,6 +23,11 @@ "ubqbot", "behaviour", "voyageai", + "Rerankers", + "reranker", + "rerank", + "reranked", + ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index 83636ad..b73af7e 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -20,7 +20,7 @@ export class Completions extends SuperOpenAi { this.context = context; } - async createCompletion(prompt: string, model: string = "o1-mini", addlContext: string[]): Promise { + async createCompletion(prompt: string, model: string = "o1-mini", additionalContext: string[]): Promise { const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ model: model, messages: [ @@ -32,7 +32,7 @@ export class Completions extends SuperOpenAi { text: "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. 
**Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." + "Context: " + - addlContext.join("\n"), + additionalContext.join("\n"), }, ], }, diff --git a/src/plugin.ts b/src/plugin.ts index 33f441d..9a6644f 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -16,11 +16,11 @@ export async function plugin(inputs: PluginInputs, env: Env) { const voyageClient = new VoyageAIClient({ apiKey: env.VOYAGEAI_API_KEY, }); - - const openaiClient = new OpenAI({ + const openAiObject = { apiKey: env.OPENAI_API_KEY, - ...(inputs.settings.openAiBaseUrl && { baseUrl: inputs.settings.openAiBaseUrl }), - }); + ...(inputs.settings.openAiBaseUrl && { baseURL: inputs.settings.openAiBaseUrl }), + }; + const openaiClient = new OpenAI(openAiObject); const context: Context = { eventName: inputs.eventName, payload: inputs.eventPayload, diff --git a/tests/main.test.ts b/tests/main.test.ts index 067158e..f1f0cd9 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -332,7 +332,7 @@ function createContext(body = TEST_SLASH_COMMAND) { }, { issue_id: "3", - issue_plaintext: "Someother issue", + issue_plaintext: "Some other issue", similarity: 0.3, }, ]; From d1b4514e4eca8321c8873aa9a1b6bf5b1b879cef Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Fri, 11 Oct 2024 23:28:39 -0400 Subject: [PATCH 63/72] fix: ci passing locally and cleanup --- .cspell.json | 5 +- package.json | 3 +- src/adapters/openai/helpers/completions.ts | 63 ++++- src/adapters/voyage/helpers/rerankers.ts | 22 +- src/handlers/add-comment.ts | 6 +- src/handlers/ask-gpt.ts | 84 +++++-- src/handlers/comments.ts | 61 +++-- src/helpers/format-chat-history.ts | 165 +++++++------ src/helpers/issue-fetching.ts | 226 +++++++++++++---- src/helpers/issue-handling.ts | 55 ++++- src/helpers/issue.ts | 239 ++++++++++++++++-- src/plugin.ts | 7 +- src/types/github.ts | 30 ++- tests/main.test.ts | 106 +++----- yarn.lock | 275 ++++++++++++++++----- 15 files changed, 980 insertions(+), 367 deletions(-) diff --git a/.cspell.json b/.cspell.json index deb1bde..bbe91d8 100644 --- a/.cspell.json +++ b/.cspell.json @@ -27,7 
+27,10 @@ "reranker", "rerank", "reranked", - + "mixtral", + "nemo", + "Reranking", + "mistralai" ], "dictionaries": ["typescript", "node", "software-terms"], "import": ["@cspell/dict-typescript/cspell-ext.json", "@cspell/dict-node/cspell-ext.json", "@cspell/dict-software-terms"], diff --git a/package.json b/package.json index c6bb216..66b7373 100644 --- a/package.json +++ b/package.json @@ -27,9 +27,11 @@ "open-source" ], "dependencies": { + "@mswjs/data": "^0.16.2", "@octokit/rest": "20.1.1", "@octokit/webhooks": "13.2.7", "@sinclair/typebox": "0.32.33", + "@supabase/supabase-js": "^2.45.4", "@ubiquity-dao/ubiquibot-logger": "^1.3.0", "dotenv": "^16.4.5", "openai": "^4.63.0", @@ -44,7 +46,6 @@ "@cspell/dict-typescript": "3.1.5", "@eslint/js": "9.5.0", "@jest/globals": "29.7.0", - "@mswjs/data": "0.16.1", "@types/jest": "^29.5.12", "@types/node": "20.14.5", "cspell": "8.9.0", diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index b73af7e..b65d67b 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -1,7 +1,7 @@ import OpenAI from "openai"; import { Context } from "../../../types"; import { SuperOpenAi } from "./openai"; -const MAX_TOKENS = 3000; +const MAX_TOKENS = 7000; export interface CompletionsType { answer: string; @@ -20,7 +20,14 @@ export class Completions extends SuperOpenAi { this.context = context; } - async createCompletion(prompt: string, model: string = "o1-mini", additionalContext: string[]): Promise { + async createCompletion( + prompt: string, + model: string = "o1-mini", + additionalContext: string[], + localContext: string[], + groundTruths: string[], + botName: string + ): Promise { const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ model: model, messages: [ @@ -30,9 +37,17 @@ export class Completions extends SuperOpenAi { { type: "text", text: + "You Must obey the following ground truths: [" + + groundTruths.join(":") + + "]\n" + "You are tasked with assisting as a GitHub bot by generating responses based on provided chat history and similar responses, focusing on using available knowledge within the provided corpus, which may contain code, documentation, or incomplete information. Your role is to interpret and use this knowledge effectively to answer user questions.\n\n# Steps\n\n1. **Understand Context**: Review the chat history and any similar provided responses to understand the context.\n2. **Extract Relevant Information**: Identify key pieces of information, even if they are incomplete, from the available corpus.\n3. **Apply Knowledge**: Use the extracted information and relevant documentation to construct an informed response.\n4. **Draft Response**: Compile the gathered insights into a coherent and concise response, ensuring it's clear and directly addresses the user's query.\n5. **Review and Refine**: Check for accuracy and completeness, filling any gaps with logical assumptions where necessary.\n\n# Output Format\n\n- Concise and coherent responses in paragraphs that directly address the user's question.\n- Incorporate inline code snippets or references from the documentation if relevant.\n\n# Examples\n\n**Example 1**\n\n*Input:*\n- Chat History: \"What was the original reason for moving the LP tokens?\"\n- Corpus Excerpts: \"It isn't clear to me if we redid the staking yet and if we should migrate. If so, perhaps we should make a new issue instead. 
We should investigate whether the missing LP tokens issue from the MasterChefV2.1 contract is critical to the decision of migrating or not.\"\n\n*Output:*\n\"It was due to missing LP tokens issue from the MasterChefV2.1 Contract.\n\n# Notes\n\n- Ensure the response is crafted from the corpus provided, without introducing information outside of what's available or relevant to the query.\n- Consider edge cases where the corpus might lack explicit answers, and justify responses with logical reasoning based on the existing information." + "Your name is : " + botName + "\n" + "Primary Context: " + additionalContext.join("\n") + "\nLocal Context: " + localContext.join("\n"), }, ], }, @@ -48,6 +63,46 @@ export class Completions extends SuperOpenAi { ], temperature: 0.2, max_tokens: MAX_TOKENS, + top_p: 0.5, + frequency_penalty: 0, + presence_penalty: 0, + response_format: { + type: "text", + }, + }); + const answer = res.choices[0].message; + console.log(JSON.stringify(res, null, 2)); + if (answer && answer.content && res.usage) { + return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } }; + } + return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } }; + } + + async contextCompressionCalls(context: string[]): Promise<CompletionsType> { + const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ + model: "mistralai/mistral-nemo", + messages: [ + { + role: "system", + content: [ + { + type: "text", + text: "You are an LLM responsible for compressing the context for better processing, do not leave anything out", + }, + ], + }, + { + role: "user", + content: [ + { + type: "text", + text: context.join("\n"), + }, + ], + }, + ], + temperature: 0.2, + max_tokens: 300, top_p: 1, frequency_penalty: 0, presence_penalty: 0, diff --git a/src/adapters/voyage/helpers/rerankers.ts b/src/adapters/voyage/helpers/rerankers.ts index 734063d..9d68aee 100644 --- a/src/adapters/voyage/helpers/rerankers.ts +++ b/src/adapters/voyage/helpers/rerankers.ts @@ -10,14 +10,20 @@ export class Rerankers extends SuperVoyage { this.context = context; } - async reRankResults(results: string[], query: string): Promise<string[]> { - const response = await this.client.rerank({ - query, - documents: results, - model: "rerank-2", - returnDocuments: true, - topK: 5, - }); + async reRankResults(results: string[], query: string, topK: number = 5): Promise<string[]> { + let response; + try { + response = await this.client.rerank({ + query, + documents: results, + model: "rerank-2", + returnDocuments: true, + topK, + }); + } catch (e: unknown) { + this.context.logger.error("Reranking failed!", { e }); + return results; + } const rerankedResults = response.data || []; return rerankedResults.map((result) => result.document).filter((document): document is string => document !== undefined); } diff --git a/src/handlers/add-comment.ts b/src/handlers/add-comment.ts index 56068f9..ec4a731 100644 --- a/src/handlers/add-comment.ts +++ b/src/handlers/add-comment.ts @@ -1,9 +1,13 @@ import { Context } from "../types/context"; +/** + * Add a comment to an issue + * @param context - The context object containing environment and configuration details + * @param message - The message to add as a comment + */ export async function addCommentToIssue(context: Context, message: string) { const { payload } = context; const issueNumber = payload.issue.number; - try { await
context.octokit.issues.createComment({ owner: payload.repository.owner.login, diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 90e5222..9cb1b02 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -2,35 +2,76 @@ import { Context } from "../types"; import { CompletionsType } from "../adapters/openai/helpers/completions"; import { CommentSimilaritySearchResult } from "../adapters/supabase/helpers/comment"; import { IssueSimilaritySearchResult } from "../adapters/supabase/helpers/issues"; +import { recursivelyFetchLinkedIssues } from "../helpers/issue-fetching"; +import { formatChatHistory } from "../helpers/format-chat-history"; +import { optimizeContext } from "../helpers/issue"; +/** + * Asks a question to GPT and returns the response + * @param context - The context object containing environment and configuration details + * @param question - The question to ask GPT + * @returns The response from GPT + * @throws If no question is provided + */ export async function askQuestion(context: Context, question: string) { if (!question) { - throw context.logger.error(`No question provided`); + throw context.logger.error("No question provided"); } - //TODO: Temporary workaround - //const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ context }); - //const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - return await askGpt(context, question); + const { specAndBodies, streamlinedComments } = await recursivelyFetchLinkedIssues({ + context, + owner: context.payload.repository.owner.login, + repo: context.payload.repository.name, + }); + const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); + console.log("formattedChat", formattedChat); + return await askGpt(context, question, formattedChat); } -export async function askGpt(context: Context, question: string): Promise { +/** + * Asks GPT a question and returns the completions + * @param context - The context object containing environment and configuration details + * @param question - The question to ask GPT + * @param formattedChat - The formatted chat history to provide context to GPT + * @returns completions - The completions generated by GPT + **/ +export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { const { + env: { UBIQUITY_OS_APP_SLUG }, config: { model, similarityThreshold }, } = context; - //Fetch Similar Text - const similarComments = (await context.adapters.supabase.comment.findSimilarComments(question, similarityThreshold, "")) || []; - const similarIssues = (await context.adapters.supabase.issue.findSimilarIssues(question, similarityThreshold, "")) || []; - //Create a new object with plain text from both the objects - const similarText = similarComments.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext); + let similarComments: CommentSimilaritySearchResult[] = []; + let similarIssues: IssueSimilaritySearchResult[] = []; + try { + similarComments = (await context.adapters.supabase.comment.findSimilarComments(question, similarityThreshold, "")) || []; + } catch (error) { + context.logger.error(`Error fetching similar comments: ${(error as Error).message}`); + } + try { + similarIssues = (await context.adapters.supabase.issue.findSimilarIssues(question, similarityThreshold, "")) || []; + } catch (error) { + context.logger.error(`Error fetching similar issues: ${(error as Error).message}`); + } + let similarText = 
similarComments.map((comment: CommentSimilaritySearchResult) => comment.comment_plaintext); similarText.push(...similarIssues.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext)); - //Rerank Similar Comments and Issues - const rerankedText = await context.adapters.voyage.reranker.reRankResults(similarText, question); - //Remove unwanted characters from the text + // Remove Null Results (Private Comments) + similarText = similarText.filter((text) => text !== null); + formattedChat = formattedChat.filter((text) => text !== null); + context.logger.info(formattedChat.join("")); + // Optimize the context + formattedChat = optimizeContext(formattedChat); + // ReRank the results based on the question + // const reRankedChat = formattedChat.length > 0 ? await context.adapters.voyage.reranker.reRankResults(formattedChat.filter(text => text !== ""), question, 300) : []; + similarText = similarText.filter((text) => text !== ""); + const rerankedText = similarText.length > 0 ? await context.adapters.voyage.reranker.reRankResults(similarText, question) : []; rerankedText.forEach((text) => removeUnwantedChars(text)); - //TODO: Temporary workaround - //const chat = createChatHistory(formattedChat); - //logger.info(`Sending chat to OpenAI`, { chat }); - return context.adapters.openai.completions.createCompletion(question, model, rerankedText); + return context.adapters.openai.completions.createCompletion( + question, + model, + rerankedText, + formattedChat, + ["typescript", "github", "cloudflare worker", "actions", "jest", "supabase", "openai"], + UBIQUITY_OS_APP_SLUG + ); } /** @@ -38,6 +79,9 @@ export async function askGpt(context: Context, question: string): Promise { +function removeUnwantedChars(text: string): string { + if (!text) { + return ""; + } return text.replace(/[^a-zA-Z0-9\s]/g, ""); -}; +} diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index 392f8da..1cd522e 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -1,38 +1,36 @@ import { splitKey } from "../helpers/issue"; -import { IssueComments, LinkedIssues, ReviewComments } from "../types/github"; +import { LinkedIssues, SimplifiedComment } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; +/** + * Get all streamlined comments from linked issues + * @param linkedIssues - The linked issues to get comments from + * @returns The streamlined comments which are grouped by issue key + */ export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) { const streamlinedComments: Record = {}; - for (const issue of linkedIssues) { - const linkedIssueComments = issue.comments; - if (!linkedIssueComments) continue; - - if (linkedIssueComments.length > 0) { - const linkedStreamlinedComments = streamlineComments(linkedIssueComments); - - if (linkedStreamlinedComments) { - for (const [key, value] of Object.entries(linkedStreamlinedComments)) { - if (!streamlinedComments[key]) { - streamlinedComments[key] = value; - continue; - } - - const previous = streamlinedComments[key] || []; - streamlinedComments[key] = [...previous, ...value]; - } - } + const linkedIssueComments = issue.comments || []; + if (linkedIssueComments.length === 0) continue; + const linkedStreamlinedComments = streamlineComments(linkedIssueComments); + if (!linkedStreamlinedComments) continue; + for (const [key, value] of Object.entries(linkedStreamlinedComments)) { + streamlinedComments[key] = [...(streamlinedComments[key] || []), ...value]; } } - return streamlinedComments; } +/** + * Create a 
unique key for an issue based on its URL and optional issue number + * @param issueUrl - The URL of the issue + * @param issue - The optional issue number + * @returns The unique key for the issue + */ export function createKey(issueUrl: string, issue?: number) { const urlParts = issueUrl.split("/"); - let key = ""; + let key; if (urlParts.length === 7) { const [, , , issueOrg, issueRepo, , issueNumber] = urlParts; @@ -65,29 +63,28 @@ export function createKey(issueUrl: string, issue?: number) { return key; } -export function streamlineComments(comments: IssueComments | ReviewComments) { +/** + * Streamline comments by filtering out bot comments and organizing them by issue key + * @param comments - The comments to streamline + * @returns The streamlined comments grouped by issue key + */ +export function streamlineComments(comments: SimplifiedComment[]) { const streamlined: Record = {}; - for (const comment of comments) { - const user = comment.user; - if (user && user.type === "Bot") { - continue; - } - - const url = comment.html_url; - const body = comment.body; + const { user, issueUrl: url, body } = comment; + // Skip bot comments + if (user?.type === "Bot") continue; const key = createKey(url); const [owner, repo] = splitKey(key); if (!streamlined[key]) { streamlined[key] = []; } - if (user && body) { streamlined[key].push({ user: user.login, body, - id: comment.id, + id: parseInt(comment.id, 10), org: owner, repo, issueUrl: url, diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 5d03dd7..8bc115f 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -5,53 +5,62 @@ import { createKey, streamlineComments } from "../handlers/comments"; import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; import { splitKey } from "./issue"; -export async function formatChatHistory(context: Context, streamlined: Record, specAndBodies: Record) { - const convoKeys = Object.keys(streamlined); - const specAndBodyKeys = Object.keys(specAndBodies); - const chatHistory: string[] = []; - const currentIssueKey = createKey(context.payload.issue.html_url); - const keys: string[] = Array.from(new Set([...convoKeys, ...specAndBodyKeys, currentIssueKey])); - - for (const key of keys) { - const isCurrentIssue = key === currentIssueKey; - const block = await createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue); - chatHistory.push(block); - } - - return Array.from(new Set(chatHistory)).join(""); +/** + * Formats the chat history by combining streamlined comments and specifications or bodies for issues and pull requests. + * + * @param context - The context object containing information about the current GitHub event. + * @param streamlined - A record of streamlined comments for each issue or pull request. + * @param specAndBodies - A record of specifications or bodies for each issue or pull request. + * @returns A promise that resolves to a formatted string representing the chat history. 
+ */ +export async function formatChatHistory( + context: Context, + streamlined: Record, + specAndBodies: Record +): Promise { + const keys = new Set([...Object.keys(streamlined), ...Object.keys(specAndBodies), createKey(context.payload.issue.html_url)]); + const chatHistory = await Promise.all( + Array.from(keys).map(async (key) => { + const isCurrentIssue = key === createKey(context.payload.issue.html_url); + return createContextBlockSection(context, key, streamlined, specAndBodies, isCurrentIssue); + }) + ); + return Array.from(new Set(chatHistory)); } -function getCorrectHeaderString(isPull: string | null, issueNumber: number, isCurrentIssue: boolean, isBody: boolean) { - const strings = { - pull: { - linked: `Linked Pull #${issueNumber} Request`, - current: `Current Pull #${issueNumber} Request`, - }, - issue: { - linked: `Linked Issue #${issueNumber} Specification`, - current: `Current Issue #${issueNumber} Specification`, - }, - convo: { - linked: `Linked Issue #${issueNumber} Conversation`, - current: `Current Issue #${issueNumber} Conversation`, - }, +/** + * Generates the correct header string based on the provided parameters. + * + * @param prDiff - The pull request diff string, if available. + * @param issueNumber - The issue number. + * @param isCurrentIssue - A boolean indicating if this is the current issue. + * @param isBody - A boolean indicating if this is for the body of the issue. + * @returns The formatted header string. + */ +function getCorrectHeaderString(prDiff: string | null, issueNumber: number, isCurrentIssue: boolean, isBody: boolean) { + const headerTemplates = { + pull: `Pull #${issueNumber} Request`, + issue: `Issue #${issueNumber} Specification`, + convo: `Issue #${issueNumber} Conversation`, }; - let header = ""; - - if (isPull) { - header = isCurrentIssue ? strings.pull.current : strings.pull.linked; - } else { - header = isCurrentIssue ? strings.issue.current : strings.issue.linked; - } - - if (isBody) { - header = isCurrentIssue ? strings.convo.current : strings.convo.linked; - } + const type = prDiff ? "pull" : "issue"; + const context = isCurrentIssue ? "current" : "linked"; + const bodyContext = isBody ? "convo" : type; - return header; + return `${context.charAt(0).toUpperCase() + context.slice(1)} ${headerTemplates[bodyContext]}`; } +/** + * Creates a context block section for the given issue or pull request. + * + * @param context - The context object containing information about the current GitHub event. + * @param key - The unique key representing the issue or pull request. + * @param streamlined - A record of streamlined comments for each issue or pull request. + * @param specAndBodies - A record of specifications or bodies for each issue or pull request. + * @param isCurrentIssue - A boolean indicating whether the key represents the current issue. + * @returns A formatted string representing the context block section. 
+ */ async function createContextBlockSection( context: Context, key: string, @@ -60,30 +69,23 @@ async function createContextBlockSection( isCurrentIssue: boolean ) { let comments = streamlined[key]; - if (!comments || comments.length === 0) { const [owner, repo, number] = splitKey(key); - const { comments: comments_ } = await fetchIssueComments({ + const { comments: fetchedComments } = await fetchIssueComments({ context, owner, repo, issueNum: parseInt(number), }); - - comments = streamlineComments(comments_)[key]; + comments = streamlineComments(fetchedComments)[key]; } - const [org, repo, issueNum] = key.split("/"); - const issueNumber = parseInt(issueNum); - const prDiff = await fetchPullRequestDiff(context, org, repo, issueNumber); - if (!issueNumber || isNaN(issueNumber)) { throw context.logger.error("Issue number is not valid"); } - + const prDiff = await fetchPullRequestDiff(context, org, repo, issueNumber); const specHeader = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, false); - let specOrBody = specAndBodies[key]; if (!specOrBody) { specOrBody = @@ -97,65 +99,76 @@ async function createContextBlockSection( )?.body || "No specification or body available"; } const specOrBodyBlock = [createHeader(specHeader, key), createSpecOrBody(specOrBody), createFooter(specHeader)]; - const header = getCorrectHeaderString(prDiff, issueNumber, isCurrentIssue, true); const repoString = `${org}/${repo} #${issueNumber}`; - const block = [specOrBodyBlock.join(""), createHeader(header, repoString), createComment({ issueNumber, repo, org, comments }), createFooter(header)]; - if (!prDiff) { return block.join(""); } - const diffBlock = [createHeader("Linked Pull Request Code Diff", repoString), prDiff, createFooter("Linked Pull Request Code Diff")]; - return block.concat(diffBlock).join(""); } +/** + * Creates a header string for the given content and repository string. + * + * @param content - The content to include in the header. + * @param repoString - The repository string to include in the header. + * @returns A formatted header string. + */ function createHeader(content: string, repoString: string) { return `=== ${content} === ${repoString} ===\n\n`; } +/** + * Creates a footer string for the given content. + * + * @param content - The content to include in the footer. + * @returns A formatted footer string. + */ function createFooter(content: string) { return `=== End ${content} ===\n\n`; } +/** + * Creates a comment string from the StreamlinedComments object. + * + * @param comment - The StreamlinedComments object. + * @returns A string representing the comments. + */ function createComment(comment: StreamlinedComments) { if (!comment.comments) { return ""; } - const comments = []; - - // filter dupes - comment.comments = comment.comments?.filter((c, i, a) => a.findIndex((cc) => cc.id === c.id) === i); - - for (const c of comment.comments) { - comments.push(`${c.id} ${c.user}: ${c.body}\n`); - } - return comments.join(""); + // Remove duplicates + const uniqueComments = comment.comments.filter((c, i, a) => a.findIndex((cc) => cc.id === c.id) === i); + // Format comments + const formattedComments = uniqueComments.map((c) => `${c.id} ${c.user}: ${c.body}\n`); + return formattedComments.join(""); } +/** + * Creates a formatted string for the specification or body of an issue. + * + * @param specOrBody - The specification or body content. + * @returns A formatted string representing the specification or body. 
+ */ function createSpecOrBody(specOrBody: string) { return `${specOrBody}\n`; } +/** + * Creates a chat history array from the formatted chat string. + * + * @param formattedChat - The formatted chat string. + * @returns An array of ChatCompletionMessageParam objects representing the chat history. + */ export function createChatHistory(formattedChat: string) { const chatHistory: ChatCompletionMessageParam[] = []; - - const systemMessage: ChatCompletionMessageParam = { - role: "system", - content: `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. - Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. - The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. - Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`, - }; - const userMessage: ChatCompletionMessageParam = { role: "user", content: formattedChat, }; - - chatHistory.push(systemMessage, userMessage); - + chatHistory.push(userMessage); return chatHistory; } diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index fc1653f..1484b80 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -1,72 +1,145 @@ import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; -import { FetchParams, Issue, IssueComments, LinkedIssues, ReviewComments } from "../types/github"; +import { IssueWithUser, SimplifiedComment, User } from "../types/github"; +import { FetchParams, Issue, Comments, LinkedIssues } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; -import { dedupeStreamlinedComments, idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; +import { + dedupeStreamlinedComments, + fetchCodeLinkedFromIssue, + idIssueFromComment, + mergeStreamlinedComments, + pullReadmeFromRepoForIssue, + splitKey, +} from "./issue"; import { handleIssue, handleSpec, handleSpecAndBodyKeys, throttlePromises } from "./issue-handling"; +/** + * Recursively fetches linked issues and processes them, including fetching comments and specifications. + * + * @param params - The parameters required to fetch the linked issues, including context and other details. + * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. + */ export async function recursivelyFetchLinkedIssues(params: FetchParams) { const { linkedIssues, seen, specAndBodies, streamlinedComments } = await fetchLinkedIssues(params); - const fetchPromises = linkedIssues.map(async (linkedIssue) => await mergeCommentsAndFetchSpec(params, linkedIssue, streamlinedComments, specAndBodies, seen)); await throttlePromises(fetchPromises, 10); - const linkedIssuesKeys = linkedIssues.map((issue) => createKey(`${issue.owner}/${issue.repo}/${issue.issueNumber}`)); const specAndBodyKeys = Array.from(new Set([...Object.keys(specAndBodies), ...Object.keys(streamlinedComments), ...linkedIssuesKeys])); - await handleSpecAndBodyKeys(specAndBodyKeys, params, dedupeStreamlinedComments(streamlinedComments), seen); return { linkedIssues, specAndBodies, streamlinedComments }; } +/** + * Fetches linked issues recursively and processes them. 
+ * + * @param params - The parameters required to fetch the linked issues, including context and other details. + * @returns A promise that resolves to an object containing linked issues, specifications, streamlined comments, and seen issue keys. + */ export async function fetchLinkedIssues(params: FetchParams) { const { comments, issue } = await fetchIssueComments(params); if (!issue) { return { streamlinedComments: {}, linkedIssues: [], specAndBodies: {}, seen: new Set() }; } + if (!issue.body || !issue.html_url) { + throw new Error("Issue body or URL not found"); + } + + console.log(params, params.owner, params.repo); + if (!params.owner || !params.repo) { + throw new Error("Owner, repo, or issue number not found"); + } const issueKey = createKey(issue.html_url); const [owner, repo, issueNumber] = splitKey(issueKey); - const linkedIssues: LinkedIssues[] = [{ body: issue.body || "", comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; + const linkedIssues: LinkedIssues[] = [{ body: issue.body, comments, issueNumber: parseInt(issueNumber), owner, repo, url: issue.html_url }]; const specAndBodies: Record = {}; - const seen = new Set(); + const seen = new Set([issueKey]); - // add the spec body as a comment comments.push({ - body: issue.body || "", - // @ts-expect-error - github types undefined - user: issue.user, - id: issue.id, - html_url: issue.html_url, + body: issue.body, + user: issue.user as User, + id: issue.id.toString(), + org: params.owner, + repo: params.repo, + issueUrl: issue.html_url, }); + //Fetch the README of the repository + try { + const readme = await pullReadmeFromRepoForIssue(params); + if (readme) { + comments.push({ + body: readme, + user: issue.user as User, + id: issue.id.toString(), + org: params.owner, + repo: params.repo, + issueUrl: issue.html_url, + }); + } + } catch (error) { + params.context.logger.error(`Error fetching README`, { + error: error as Error, + owner, + repo, + issue, + }); + } + for (const comment of comments) { const foundIssues = idIssueFromComment(comment.body, params); + const foundCodes = comment.body ? 
await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; + if (foundIssues) { for (const linkedIssue of foundIssues) { const linkedKey = createKey(linkedIssue.url, linkedIssue.issueNumber); - if (seen.has(linkedKey)) { - continue; - } - seen.add(linkedKey); - const { issueNumber, owner, repo } = linkedIssue; + if (seen.has(linkedKey)) continue; + seen.add(linkedKey); const { comments: fetchedComments, issue: fetchedIssue } = await fetchIssueComments({ context: params.context, - issueNum: issueNumber, - owner, - repo, + issueNum: linkedIssue.issueNumber, + owner: linkedIssue.owner, + repo: linkedIssue.repo, }); - specAndBodies[linkedKey] = fetchedIssue?.body || ""; - linkedIssue.body = fetchedIssue?.body || ""; + if (!fetchedIssue || !fetchedIssue.body) { + continue; + } + + specAndBodies[linkedKey] = fetchedIssue?.body; + linkedIssue.body = fetchedIssue?.body; linkedIssue.comments = fetchedComments; linkedIssues.push(linkedIssue); } } + + if (foundCodes) { + for (const code of foundCodes) { + comments.push({ + body: code.body, + user: code.user, + id: code.id, + org: code.org, + repo: code.repo, + issueUrl: code.issueUrl, + }); + } + } } - return { streamlinedComments: await getAllStreamlinedComments(linkedIssues), linkedIssues, specAndBodies, seen }; + const streamlinedComments = await getAllStreamlinedComments(linkedIssues); + return { streamlinedComments, linkedIssues, specAndBodies, seen }; } +/** + * Merges comments and fetches the specification for a linked issue. + * + * @param params - The parameters required to fetch the linked issue, including context and other details. + * @param linkedIssue - The linked issue for which comments and specifications need to be fetched. + * @param streamlinedComments - A record of streamlined comments associated with issues. + * @param specOrBodies - A record of specifications or bodies associated with issues. + * @param seen - A set of issue keys that have already been processed to avoid duplication. + */ export async function mergeCommentsAndFetchSpec( params: FetchParams, linkedIssue: LinkedIssues, @@ -79,17 +152,24 @@ export async function mergeCommentsAndFetchSpec( const merged = mergeStreamlinedComments(streamlinedComments, streamed); streamlinedComments = { ...streamlinedComments, ...merged }; } - if (linkedIssue.body) { await handleSpec(params, linkedIssue.body, specOrBodies, createKey(linkedIssue.url, linkedIssue.issueNumber), seen, streamlinedComments); } } -export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number) { - const { octokit } = context; - +/** + * Fetches the diff of a pull request. + * + * @param context - The context containing the octokit instance and logger. + * @param org - The organization or owner of the repository. + * @param repo - The name of the repository. + * @param issue - The pull request number. + * @returns A promise that resolves to the diff of the pull request as a string, or null if an error occurs. 
+ */ +export async function fetchPullRequestDiff(context: Context, org: string, repo: string, issue: number): Promise { + const { octokit, logger } = context; try { - const diff = await octokit.pulls.get({ + const { data } = await octokit.pulls.get({ owner: org, repo, pull_number: issue, @@ -97,27 +177,37 @@ export async function fetchPullRequestDiff(context: Context, org: string, repo: format: "diff", }, }); - return diff.data as unknown as string; - } catch (e) { + return data as unknown as string; + } catch (error) { + logger.error(`Error fetching pull request diff`, { + error: error as Error, + owner: org, + repo, + pull_number: issue, + }); return null; } } -export async function fetchIssue(params: FetchParams) { +/** + * Fetches the details of a pull request. + * + * @param params - The parameters required to fetch the pull request, including context and other details. + * @returns A promise that resolves to the pull request details or null if an error occurs. + */ +export async function fetchIssue(params: FetchParams): Promise { const { octokit, payload, logger } = params.context; const { issueNum, owner, repo } = params; - try { - return await octokit.rest.issues - .get({ - owner: owner || payload.repository.owner.login, - repo: repo || payload.repository.name, - issue_number: issueNum || payload.issue.number, - }) - .then(({ data }) => data as Issue); - } catch (e) { - logger.error(`Error fetching issue `, { - e, + const response = await octokit.rest.issues.get({ + owner: owner || payload.repository.owner.login, + repo: repo || payload.repository.name, + issue_number: issueNum || payload.issue.number, + }); + return response.data as IssueWithUser; + } catch (error) { + logger.error(`Error fetching issue`, { + error: error as Error, owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, issue_number: issueNum || payload.issue.number, @@ -126,27 +216,32 @@ export async function fetchIssue(params: FetchParams) { } } +/** + * Fetches the comments for a given issue or pull request. + * + * @param params - The parameters required to fetch the issue comments, including context and other details. + * @returns A promise that resolves to an object containing the issue and its comments. 
+ */ export async function fetchIssueComments(params: FetchParams) { const { octokit, payload, logger } = params.context; const { issueNum, owner, repo } = params; - const issue = await fetchIssue(params); - - let comments: IssueComments | ReviewComments = []; - + let comments: Comments = []; try { if (issue?.pull_request) { - comments = await octokit.paginate(octokit.pulls.listReviewComments, { + const response = await octokit.rest.pulls.listReviewComments({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, pull_number: issueNum || payload.issue.number, }); + comments = response.data; } else { - comments = await octokit.paginate(octokit.issues.listComments, { + const response = await octokit.rest.issues.listComments({ owner: owner || payload.repository.owner.login, repo: repo || payload.repository.name, issue_number: issueNum || payload.issue.number, }); + comments = response.data; } } catch (e) { logger.error(`Error fetching comments `, { @@ -157,13 +252,24 @@ export async function fetchIssueComments(params: FetchParams) { }); comments = []; } + comments = comments.filter((comment) => comment.user?.type !== "Bot") as Comments; + const simplifiedComments = castCommentsToSimplifiedComments(comments, params); return { issue, - comments: comments.filter((comment) => comment.user?.type !== "Bot") as IssueComments | ReviewComments, + comments: simplifiedComments, }; } +/** + * Fetches and handles an issue based on the provided key and parameters. + * + * @param key - The unique key representing the issue in the format "owner/repo/issueNumber". + * @param params - The parameters required to fetch the issue, including context and other details. + * @param streamlinedComments - A record of streamlined comments associated with issues. + * @param seen - A set of issue keys that have already been processed to avoid duplication. + * @returns A promise that resolves to an array of streamlined comments for the specified issue. + */ export async function fetchAndHandleIssue( key: string, params: FetchParams, @@ -171,6 +277,24 @@ export async function fetchAndHandleIssue( seen: Set ): Promise { const [owner, repo, issueNumber] = splitKey(key); - await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNumber) }, streamlinedComments, seen); + const issueParams = { ...params, owner, repo, issueNum: parseInt(issueNumber) }; + await handleIssue(issueParams, streamlinedComments, seen); return streamlinedComments[key] || []; } + +function castCommentsToSimplifiedComments(comments: Comments, params: FetchParams): SimplifiedComment[] { + if (!comments) { + return []; + } + return comments + .filter((comment) => comment.body !== undefined) + .map((comment) => ({ + id: comment.id.toString(), + org: params.owner || params.context.payload.repository.owner.login, + repo: params.repo || params.context.payload.repository.name, + issueUrl: comment.html_url, + body: comment.body as string, + user: comment.user as User, + url: comment.html_url, + })); +} diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index f10d998..2239354 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -4,6 +4,14 @@ import { StreamlinedComment } from "../types/gpt"; import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, mergeCommentsAndFetchSpec } from "./issue-fetching"; +/** + * Handles the processing of an issue. 
+ * + * @param params - The parameters required to fetch and handle issues. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @param alreadySeen - A set of keys that have already been processed to avoid duplication. + * @returns A promise that resolves when the issue has been handled. + */ export async function handleIssue(params: FetchParams, streamlinedComments: Record, alreadySeen: Set) { if (alreadySeen.has(createKey(`${params.owner}/${params.repo}/${params.issueNum}`))) { return; @@ -14,6 +22,17 @@ export async function handleIssue(params: FetchParams, streamlinedComments: Reco return mergeStreamlinedComments(streamlinedComments, streamlined); } +/** + * Handles the processing of a specification or body text. + * + * @param params - The parameters required to fetch and handle issues. + * @param specOrBody - The specification or body text to be processed. + * @param specAndBodies - A record of specifications and bodies indexed by keys. + * @param key - The key associated with the current specification or body. + * @param seen - A set of keys that have already been processed to avoid duplication. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @returns A promise that resolves to the updated record of specifications and bodies. + */ export async function handleSpec( params: FetchParams, specOrBody: string, @@ -24,7 +43,6 @@ export async function handleSpec( ) { specAndBodies[key] = specOrBody; const otherReferences = idIssueFromComment(specOrBody, params); - if (otherReferences) { for (const ref of otherReferences) { const anotherKey = createKey(ref.url, ref.issueNumber); @@ -38,20 +56,31 @@ export async function handleSpec( repo: ref.repo, issueNum: ref.issueNumber, }); + if (!issue?.body) { + return; + } + if (issue?.body) { specAndBodies[anotherKey] = issue.body; } const [owner, repo, issueNum] = splitKey(anotherKey); if (!streamlinedComments[anotherKey]) { await handleIssue({ ...params, owner, repo, issueNum: parseInt(issueNum) }, streamlinedComments, seen); - await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue?.body || "", specAndBodies, anotherKey, seen, streamlinedComments); + await handleSpec({ ...params, owner, repo, issueNum: parseInt(issueNum) }, issue?.body, specAndBodies, anotherKey, seen, streamlinedComments); } } } - return specAndBodies; } +/** + * Handles the processing of a comment. + * + * @param params - The parameters required to fetch and handle issues. + * @param comment - The comment to be processed. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @param seen - A set of keys that have already been processed to avoid duplication. + */ export async function handleComment( params: FetchParams, comment: StreamlinedComment, @@ -59,7 +88,6 @@ export async function handleComment( seen: Set ) { const otherReferences = idIssueFromComment(comment.body, params); - if (otherReferences) { for (const ref of otherReferences) { const key = createKey(ref.url); @@ -72,6 +100,14 @@ export async function handleComment( } } +/** + * Handles the processing of specification and body keys. + * + * @param keys - An array of keys representing issues or comments to be processed. + * @param params - The parameters required to fetch and handle issues. + * @param streamlinedComments - A record of streamlined comments indexed by keys. + * @param seen - A set of keys that have already been processed to avoid duplication. 
+ */ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, streamlinedComments: Record, seen: Set) { const commentProcessingPromises = keys.map(async (key) => { let comments = streamlinedComments[key]; @@ -83,24 +119,25 @@ export async function handleSpecAndBodyKeys(keys: string[], params: FetchParams, await handleComment(params, comment, streamlinedComments, seen); } }); - await throttlePromises(commentProcessingPromises, 10); } +/** + * Throttles the execution of promises to ensure that no more than the specified limit are running concurrently. + * + * @param promises - An array of promises to be executed. + * @param limit - The maximum number of promises to run concurrently. + */ export async function throttlePromises(promises: Promise[], limit: number) { const executing: Promise[] = []; - for (const promise of promises) { const p = promise.then(() => { void executing.splice(executing.indexOf(p), 1); }); - executing.push(p); - if (executing.length >= limit) { await Promise.race(executing); } } - await Promise.all(executing); } diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index e0ec8ca..5266a9c 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -1,17 +1,30 @@ import { createKey } from "../handlers/comments"; -import { FetchParams, LinkedIssues } from "../types/github"; +import { FetchedCodes, FetchParams, LinkedIssues } from "../types/github"; import { StreamlinedComment } from "../types/gpt"; +import { Context } from "../types/context"; // Import Context type +/** + * Removes duplicate streamlined comments based on their body content. + * + * @param streamlinedComments - The record of streamlined comments to deduplicate. + * @returns The deduplicated record of streamlined comments. + */ export function dedupeStreamlinedComments(streamlinedComments: Record) { for (const key of Object.keys(streamlinedComments)) { streamlinedComments[key] = streamlinedComments[key].filter( (comment: StreamlinedComment, index: number, self: StreamlinedComment[]) => index === self.findIndex((t: StreamlinedComment) => t.body === comment.body) ); } - return streamlinedComments; } +/** + * Merges new streamlined comments into existing streamlined comments. + * + * @param existingComments - The existing comments to merge into. + * @param newComments - The new comments to merge. + * @returns The merged comments. 
+ */ export function mergeStreamlinedComments(existingComments: Record, newComments: Record) { if (!existingComments) { existingComments = {}; @@ -20,48 +33,73 @@ export function mergeStreamlinedComments(existingComments: Record 0) { + //Check if valid issue is in the params + if (params && !(params.issueNum && params.owner && params.repo)) { + return null; + } + + if (urlMatch) { urlMatch.forEach((url) => { response.push(createLinkedIssueOrPr(url)); }); } - /** - * These can only reference issues within the same repository - * so params works here - */ - const hashMatch = comment?.match(/#(\d+)/g); - if (hashMatch && hashMatch.length > 0) { - hashMatch.forEach((hash) => { - const issueNumber = hash.replace("#", ""); - const owner = params?.context.payload.repository?.owner?.login || ""; - const repo = params?.context.payload.repository?.name || ""; - response.push({ owner, repo, issueNumber: parseInt(issueNumber), url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` }); - }); - } - return response; + // This section handles issue references using markdown format (e.g., #123) + // const hashMatch = comment?.match(/#(\d+)/g); + // if (hashMatch) { + // const owner = params?.context.payload.repository?.owner?.login || ""; + // const repo = params?.context.payload.repository?.name || ""; + + // hashMatch.forEach((hash) => { + // const issueNumber = hash.replace("#", ""); + // response.push({ + // owner, + // repo, + // issueNumber: parseInt(issueNumber, 10), + // url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` + // }); + // }); + // } + + return response.length > 0 ? response : null; } +/** + * Creates a linked issue or pull request object from a given GitHub URL. + * + * @param url - The GitHub URL to create the linked issue or pull request from. + * @returns An object representing the linked issue or pull request. + */ function createLinkedIssueOrPr(url: string): LinkedIssues { const key = createKey(url); const [owner, repo, issueNumber] = splitKey(key); - return { owner, repo, @@ -69,3 +107,162 @@ function createLinkedIssueOrPr(url: string): LinkedIssues { url, }; } + +/** + * Fetches the code linked from a GitHub issue. + * + * @param issue - The issue string containing GitHub URLs. + * @param context - The context object containing the octokit instance. + * @param url - The URL of the issue. + * @param extensions - The list of file extensions to filter the linked files. + * @returns A promise that resolves to an array of fetched codes. + */ +export async function fetchCodeLinkedFromIssue( + issue: string, + context: Context, + url: string, + extensions: string[] = [".ts", ".json", ".sol"] +): Promise { + const { octokit } = context; + // Function to extract owner, repo, and path from a GitHub URL + function parseGitHubUrl(url: string): { owner: string; repo: string; path: string } | null { + const match = url.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+)\/blob\/[^/]+\/(.+)/); + return match ? { owner: match[1], repo: match[2], path: match[3] } : null; + } + // Function to check if a file has one of the specified extensions + function hasValidExtension(path: string) { + const cleanPath = path.split("#")[0]; // Remove any fragment identifiers like #L39-L49 + return extensions.some((ext) => cleanPath.toLowerCase().endsWith(ext.toLowerCase())); + } + //Function to remove Line numbers from the URL + function removeLineNumbers(url: string) { + const match = url.match(/(.*?)(#L\d+(-L\d+)?)/); + return match ? 
match[1] : url; + } + // Extract all GitHub URLs from the issue + const urls = issue.match(/https?:\/\/(www\.)?github\.com\/[^\s]+/g) || []; + // Process each URL + const results = await Promise.all( + urls.map(async (url) => { + let parsedUrl = parseGitHubUrl(url); + parsedUrl = parsedUrl ? { ...parsedUrl, path: removeLineNumbers(parsedUrl.path) } : null; + if (!parsedUrl || !hasValidExtension(parsedUrl.path)) return null; + console.log(`Fetching content from ${url}`); + try { + //Parse the commit sha from the URL + const commitSha = url.match(/https?:\/\/github\.com\/[^/]+\/[^/]+\/blob\/([^/]+)\/.+/); + let response; + if (commitSha) { + response = await octokit.repos.getContent({ + owner: parsedUrl.owner, + repo: parsedUrl.repo, + ref: commitSha ? commitSha[1] : "main", + path: parsedUrl.path, + }); + } else { + response = await octokit.repos.getContent({ + owner: parsedUrl.owner, + repo: parsedUrl.repo, + path: parsedUrl.path, + }); + } + + if ("content" in response.data) { + const content = Buffer.from(response.data.content, "base64").toString(); + return { body: content, id: parsedUrl.path }; + } + } catch (error) { + console.error(`Error fetching content from ${url}:`, error); + } + return null; + }) + ); + return results + .filter((result): result is { body: string; id: string } => result !== null) + .map((result) => ({ + ...result, + org: context.payload.repository.owner.login, + repo: context.payload.repository.name, + issueNumber: parseInt(issue.match(/\/issues\/(\d+)/)?.[1] || "0", 10), + issueUrl: url, + user: null, + })); +} + +/** + * Optimizes the context strings by removing duplicates and sorting by information density. + * Removes exact duplicates and sorts by information density and length. + * + * @param strings - The array of context strings to optimize. + * @returns The optimized array of context strings. + */ +export function optimizeContext(strings: string[]): string[] { + // Helper function to clean strings while preserving links + function cleanString(inputString: string): string { + // Preserve links by temporarily replacing them + const links: string[] = []; + inputString = inputString.replace(/https?:\/\/\S+/g, (match) => { + links.push(match); + return `__LINK${links.length - 1}__`; + }); + // Clean the string + inputString = inputString + .replace(/[^\w\s-/]|_/g, "") // Remove punctuation except '-' and '/' + .replace(/\s+/g, " ") + .trim() + .toLowerCase(); + // Restore links + inputString = inputString.replace(/__LINK(\d+)__/g, (i) => links[parseInt(i)]); + + return inputString; + } + // Helper function to calculate information density + function informationDensity(s: string): number { + const words = s.split(/\s+/); + const uniqueWords = new Set(words); + return uniqueWords.size / words.length; + } + // Clean and remove empty strings + const cleanedStrings = strings.map(cleanString).filter((s) => s.length > 0); + // Remove exact duplicates + const uniqueStrings = Array.from(new Set(cleanedStrings)); + // Sort strings by information density and length + uniqueStrings.sort((a, b) => { + const densityDiff = informationDensity(b) - informationDensity(a); + return densityDiff !== 0 ? 
densityDiff : b.length - a.length; + }); + const result: string[] = []; + const wordSet = new Set(); + for (const str of uniqueStrings) { + const words = str.split(/\s+/); + const newWords = words.filter((word) => !wordSet.has(word) && !word.startsWith("http")); + if (newWords.length > 0 || str.includes("http")) { + result.push(str); + newWords.forEach((word) => wordSet.add(word)); + } + } + return result; +} + +/** + * Extracts and returns the README content from the repository associated with the given issue. + * + * @param params - The parameters required to fetch the README, including the context with octokit instance. + * @returns The content of the README file as a string. + */ +export async function pullReadmeFromRepoForIssue(params: FetchParams): Promise { + let readme = undefined; + try { + const response = await params.context.octokit.repos.getContent({ + owner: params.context.payload.repository.owner?.login || params.context.payload.organization?.login || "", + repo: params.context.payload.repository.name, + path: "README.md", + }); + if ("content" in response.data) { + readme = Buffer.from(response.data.content, "base64").toString(); + } + } catch (error) { + throw new Error(`Error fetching README from repository: ${error}`); + } + return readme; +} diff --git a/src/plugin.ts b/src/plugin.ts index 9a6644f..9b93a5a 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -13,6 +13,7 @@ import OpenAI from "openai"; export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); const supabase = createClient(env.SUPABASE_URL, env.SUPABASE_KEY); + console.log("inputs", env); const voyageClient = new VoyageAIClient({ apiKey: env.VOYAGEAI_API_KEY, }); @@ -58,15 +59,11 @@ export async function runPlugin(context: Context) { try { const response = await askQuestion(context, question); const { answer, tokenUsage } = response; - if (!answer) { throw logger.error(`No answer from OpenAI`); } - logger.info(`Answer: ${answer}`, { tokenUsage }); - const tokens = `\n\n`; - commentToPost = answer + tokens; } catch (err) { let errorMessage; @@ -79,9 +76,9 @@ export async function runPlugin(context: Context) { } commentToPost = `${errorMessage?.logMessage.diff}\n`; } - await addCommentToIssue(context, commentToPost); } + function sanitizeMetadata(obj: LogReturn["metadata"]): string { return JSON.stringify(obj, null, 2).replace(//g, ">").replace(/--/g, "--"); } diff --git a/src/types/github.ts b/src/types/github.ts index eef93c0..55e3824 100644 --- a/src/types/github.ts +++ b/src/types/github.ts @@ -2,8 +2,13 @@ import { RestEndpointMethodTypes } from "@octokit/rest"; import { Context } from "./context"; export type Issue = RestEndpointMethodTypes["issues"]["get"]["response"]["data"]; -export type IssueComments = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"]; -export type ReviewComments = RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"]; +export type Comments = + | RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"] + | RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"]; +export type User = RestEndpointMethodTypes["users"]["getByUsername"]["response"]["data"]; + +//Modify the Issue add User Type +export type IssueWithUser = Issue & { user: User }; export type FetchParams = { context: Context; @@ -17,6 +22,25 @@ export type LinkedIssues = { repo: string; owner: string; url: string; - comments?: IssueComments | ReviewComments | null | undefined; + comments?: 
SimplifiedComment[] | null | undefined; body?: string; }; + +export type SimplifiedComment = { + user: User | null; + body: string; + id: string; + org: string; + repo: string; + issueUrl: string; +}; + +export type FetchedCodes = { + body: string; + user: User | null; + issueUrl: string; + id: string; + org: string; + repo: string; + issueNumber: number; +}; diff --git a/tests/main.test.ts b/tests/main.test.ts index f1f0cd9..86afb27 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -19,11 +19,6 @@ const LOG_CALLER = "_Logs."; const ISSUE_ID_2_CONTENT = "More context here #2"; const ISSUE_ID_3_CONTENT = "More context here #3"; -const systemMsg = `You are a GitHub integrated chatbot tasked with assisting in research and discussion on GitHub issues and pull requests. -Using the provided context, address the question being asked providing a clear and concise answer with no follow-up statements. -The LAST comment in 'Issue Conversation' is the most recent one, focus on it as that is the question being asked. -Use GitHub flavoured markdown in your response making effective use of lists, code blocks and other supported GitHub md features.`; - type Comment = { id: number; user: { @@ -81,7 +76,7 @@ describe("Ask plugin tests", () => { expect(infoSpy).toHaveBeenCalledWith("Comment is from a bot. Skipping."); }); - it("should not ask GPT a question if comment does not start with /gpt", async () => { + it("should not ask GPT a question if comment does not start with bot name", async () => { const ctx = createContext(TEST_QUESTION); const infoSpy = jest.spyOn(ctx.logger, "info"); @@ -112,33 +107,7 @@ describe("Ask plugin tests", () => { await runPlugin(ctx); expect(infoSpy).toHaveBeenCalledTimes(3); - - const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === - - This is a demo spec for a demo task just perfect for testing. 
- === End Current Issue #1 Specification === - - === Current Issue #1 Conversation === ubiquity/test-repo #1 === - - 1 ubiquity: ${TEST_QUESTION} - === End Current Issue #1 Conversation ===\n - `; - expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`); - expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { - caller: LOG_CALLER, - chat: [ - { - role: "system", - content: systemMsg, - }, - { - role: "user", - content: prompt, - }, - ], - }); - expect(infoSpy).toHaveBeenNthCalledWith(3, "Answer: This is a mock answer for the chat", { caller: LOG_CALLER, tokenUsage: { @@ -153,10 +122,10 @@ describe("Ask plugin tests", () => { const ctx = createContext(TEST_SLASH_COMMAND); const infoSpy = jest.spyOn(ctx.logger, "info"); createComments([ - transformCommentTemplate(1, 1, ISSUE_ID_2_CONTENT, "ubiquity", "test-repo", true), - transformCommentTemplate(2, 1, TEST_QUESTION, "ubiquity", "test-repo", true), - transformCommentTemplate(3, 2, ISSUE_ID_3_CONTENT, "ubiquity", "test-repo", true), - transformCommentTemplate(4, 3, "Just a comment", "ubiquity", "test-repo", true), + transformCommentTemplate(1, 1, ISSUE_ID_2_CONTENT, "ubiquity", "test-repo", true, "2"), + transformCommentTemplate(2, 1, TEST_QUESTION, "ubiquity", "test-repo", true, "1"), + transformCommentTemplate(3, 2, ISSUE_ID_3_CONTENT, "ubiquity", "test-repo", true, "3"), + transformCommentTemplate(4, 3, "Just a comment", "ubiquity", "test-repo", true, "1"), ]); await runPlugin(ctx); @@ -167,55 +136,50 @@ describe("Ask plugin tests", () => { const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 === -This is a demo spec for a demo task just perfect for testing. -=== End Current Issue #1 Specification === + This is a demo spec for a demo task just perfect for testing. 
+ === End Current Issue #1 Specification === -=== Current Issue #1 Conversation === ubiquity/test-repo #1 === + === Current Issue #1 Conversation === ubiquity/test-repo #1 === -1 ubiquity: ${ISSUE_ID_2_CONTENT} -2 ubiquity: ${TEST_QUESTION} -=== End Current Issue #1 Conversation === + 1 ubiquity: ${ISSUE_ID_2_CONTENT} [#2](https://www.github.com/ubiquity/test-repo/issues/2) + 2 ubiquity: ${TEST_QUESTION} [#1](https://www.github.com/ubiquity/test-repo/issues/1) + === End Current Issue #1 Conversation === -=== Linked Issue #2 Specification === ubiquity/test-repo/2 === + === Linked Issue #2 Specification === ubiquity/test-repo/2 === -Related to issue #3 -=== End Linked Issue #2 Specification === + Related to issue #3 + === End Linked Issue #2 Specification === -=== Linked Issue #2 Conversation === ubiquity/test-repo #2 === + === Linked Issue #2 Conversation === ubiquity/test-repo #2 === -3 ubiquity: ${ISSUE_ID_3_CONTENT} -=== End Linked Issue #2 Conversation === + 3 ubiquity: ${ISSUE_ID_3_CONTENT} [#3](https://www.github.com/ubiquity/test-repo/issues/3) + === End Linked Issue #2 Conversation === -=== Linked Issue #3 Specification === ubiquity/test-repo/3 === + === Linked Issue #3 Specification === ubiquity/test-repo/3 === -Just another issue -=== End Linked Issue #3 Specification === + Just another issue + === End Linked Issue #3 Specification === -=== Linked Issue #3 Conversation === ubiquity/test-repo #3 === + === Linked Issue #3 Conversation === ubiquity/test-repo #3 === -4 ubiquity: Just a comment -=== End Linked Issue #3 Conversation ===\n -`; + 4 ubiquity: Just a comment [#1](https://www.github.com/ubiquity/test-repo/issues/1) + === End Linked Issue #3 Conversation ===\n + `; - expect(infoSpy).toHaveBeenNthCalledWith(2, "Sending chat to OpenAI", { - caller: LOG_CALLER, - chat: [ - { - role: "system", - content: systemMsg, - }, - { - role: "user", - content: prompt, - }, - ], - }); + const normalizedExpected = normalizeString(prompt); + const normalizedReceived = normalizeString(infoSpy.mock.calls[1][0]); + + expect(normalizedReceived).toEqual(normalizedExpected); }); }); // HELPERS -function transformCommentTemplate(commentId: number, issueNumber: number, body: string, owner: string, repo: string, isIssue = true) { +function normalizeString(str: string) { + return str.replace(/\s+/g, " ").trim(); +} + +function transformCommentTemplate(commentId: number, issueNumber: number, body: string, owner: string, repo: string, isIssue = true, linkTo: string = "1") { const COMMENT_TEMPLATE = { id: 1, user: { @@ -236,7 +200,7 @@ function transformCommentTemplate(commentId: number, issueNumber: number, body: login: COMMENT_TEMPLATE.user.login, type: "User", }, - body: body, + body: body + ` [#${linkTo}](${COMMENT_TEMPLATE.html_url.replace("1", linkTo.toString())})`, url: COMMENT_TEMPLATE.url.replace("1", issueNumber.toString()), html_url: COMMENT_TEMPLATE.html_url.replace("1", issueNumber.toString()), owner: owner, @@ -301,6 +265,8 @@ function createContext(body = TEST_SLASH_COMMAND) { installation: { id: 1 } as unknown as Context["payload"]["installation"], organization: { login: "ubiquity" } as unknown as Context["payload"]["organization"], }, + owner: "ubiquity", + repo: "test-repo", logger: new Logs("debug"), config: {}, env: { diff --git a/yarn.lock b/yarn.lock index dc9255b..d8c7ed1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -259,9 +259,9 @@ "@babel/helper-plugin-utils" "^7.24.6" "@babel/runtime@^7.21.0": - version "7.24.6" - resolved 
"https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.6.tgz#5b76eb89ad45e2e4a0a8db54c456251469a3358e" - integrity sha512-Ja18XcETdEl5mzzACGd+DKgaGJzPTCow7EglgwTmHdwokzDFYh/MHua6lU6DV/hjF2IaOJ4oX2nqnjG7RElKOw== + version "7.25.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.25.7.tgz#7ffb53c37a8f247c8c4d335e89cdf16a2e0d0fb6" + integrity sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w== dependencies: regenerator-runtime "^0.14.0" @@ -318,6 +318,14 @@ dependencies: statuses "^2.0.1" +"@bundled-es-modules/tough-cookie@^0.1.6": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@bundled-es-modules/tough-cookie/-/tough-cookie-0.1.6.tgz#fa9cd3cedfeecd6783e8b0d378b4a99e52bde5d3" + integrity sha512-dvMHbL464C0zI+Yqxbz6kZ5TOEp7GLW+pry/RWndAR8MJQAXZ2rPmIs8tziTZjeIyhSNZgZbCePtfSbdWqStJw== + dependencies: + "@types/tough-cookie" "^4.0.5" + tough-cookie "^4.1.4" + "@cloudflare/kv-asset-handler@0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.3.2.tgz#06437b75664729823ac9033b89f06a3b078e4f55" @@ -1092,41 +1100,49 @@ integrity sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew== "@inquirer/confirm@^3.0.0": - version "3.1.9" - resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-3.1.9.tgz#1bc384bc8267827ec75d0684e189692bb4dda38b" - integrity sha512-UF09aejxCi4Xqm6N/jJAiFXArXfi9al52AFaSD+2uIHnhZGtd1d6lIGTRMPouVSJxbGEi+HkOWSYaiEY/+szUw== + version "3.2.0" + resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-3.2.0.tgz#6af1284670ea7c7d95e3f1253684cfbd7228ad6a" + integrity sha512-oOIwPs0Dvq5220Z8lGL/6LHRTEr9TgLHmiI99Rj1PJ1p1czTys+olrgBqZk4E2qC0YTzeHprxSQmoHioVdJ7Lw== dependencies: - "@inquirer/core" "^8.2.2" - "@inquirer/type" "^1.3.3" + "@inquirer/core" "^9.1.0" + "@inquirer/type" "^1.5.3" -"@inquirer/core@^8.2.2": - version "8.2.2" - resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-8.2.2.tgz#797b1e71b920c9788b9d26d89c8b334149852d52" - integrity sha512-K8SuNX45jEFlX3EBJpu9B+S2TISzMPGXZIuJ9ME924SqbdW6Pt6fIkKvXg7mOEOKJ4WxpQsxj0UTfcL/A434Ww== +"@inquirer/core@^9.1.0": + version "9.2.1" + resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-9.2.1.tgz#677c49dee399c9063f31e0c93f0f37bddc67add1" + integrity sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg== dependencies: - "@inquirer/figures" "^1.0.3" - "@inquirer/type" "^1.3.3" + "@inquirer/figures" "^1.0.6" + "@inquirer/type" "^2.0.0" "@types/mute-stream" "^0.0.4" - "@types/node" "^20.12.13" + "@types/node" "^22.5.5" "@types/wrap-ansi" "^3.0.0" ansi-escapes "^4.3.2" - chalk "^4.1.2" - cli-spinners "^2.9.2" cli-width "^4.1.0" mute-stream "^1.0.0" signal-exit "^4.1.0" strip-ansi "^6.0.1" wrap-ansi "^6.2.0" + yoctocolors-cjs "^2.1.2" -"@inquirer/figures@^1.0.3": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.3.tgz#1227cc980f88e6d6ab85abadbf164f5038041edd" - integrity sha512-ErXXzENMH5pJt5/ssXV0DfWUZqly8nGzf0UcBV9xTnP+KyffE2mqyxIMBrZ8ijQck2nU0TQm40EQB53YreyWHw== +"@inquirer/figures@^1.0.6": + version "1.0.7" + resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.7.tgz#d050ccc0eabfacc0248c4ff647a9dfba1b01594b" + integrity sha512-m+Trk77mp54Zma6xLkLuY+mvanPxlE4A7yNKs2HBiyZ4UkVs28Mv5c/pgWrHeInx+USHeX/WEPzjrWrcJiQgjw== + +"@inquirer/type@^1.5.3": + version "1.5.5" + resolved 
"https://registry.yarnpkg.com/@inquirer/type/-/type-1.5.5.tgz#303ea04ce7ad2e585b921b662b3be36ef7b4f09b" + integrity sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA== + dependencies: + mute-stream "^1.0.0" -"@inquirer/type@^1.3.3": - version "1.3.3" - resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-1.3.3.tgz#26b2628630fd2381c7fa1e3ab396feb9bbc575da" - integrity sha512-xTUt0NulylX27/zMx04ZYar/kr1raaiFTVvQ5feljQsiAgdm0WPj4S73/ye0fbslh+15QrIuDvfCXTek7pMY5A== +"@inquirer/type@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@inquirer/type/-/type-2.0.0.tgz#08fa513dca2cb6264fe1b0a2fabade051444e3f6" + integrity sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag== + dependencies: + mute-stream "^1.0.0" "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" @@ -1376,15 +1392,10 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" -"@mswjs/cookies@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@mswjs/cookies/-/cookies-1.1.0.tgz#1528eb43630caf83a1d75d5332b30e75e9bb1b5b" - integrity sha512-0ZcCVQxifZmhwNBoQIrystCb+2sWBY2Zw8lpfJBPCHGCA/HWqehITeCRVIv4VMy8MPlaHo2w2pTHFV2pFfqKPw== - -"@mswjs/data@0.16.1": - version "0.16.1" - resolved "https://registry.yarnpkg.com/@mswjs/data/-/data-0.16.1.tgz#ee41b95b8f2e954a07b0eb54154592a2459064d1" - integrity sha512-VhJvL/VmgAuU9/tDOcKcxHfNd+8nxYntZnrkaQEQPvZZnFwQQR9bzI1FTRROGxCHVoyfv9v84AEkl/7CIw4FAg== +"@mswjs/data@^0.16.2": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@mswjs/data/-/data-0.16.2.tgz#61d14dcb28851b25b2ca97e343d40d57870670ec" + integrity sha512-/C0d/PBcJyQJokUhcjO4HiZPc67hzllKlRtD1XELygl2t991/ATAAQJVcStn4YtVALsNodruzOHT0JIvgr0hnA== dependencies: "@types/lodash" "^4.14.172" "@types/md5" "^2.3.0" @@ -1402,16 +1413,16 @@ optionalDependencies: msw "^2.0.8" -"@mswjs/interceptors@^0.29.0": - version "0.29.1" - resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.29.1.tgz#e77fc58b5188569041d0440b25c9e9ebb1ccd60a" - integrity sha512-3rDakgJZ77+RiQUuSK69t1F0m8BQKA8Vh5DCS5V0DWvNY67zob2JhhQrhCO0AKLGINTRSFd1tBaHcJTkhefoSw== +"@mswjs/interceptors@^0.35.8": + version "0.35.9" + resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.35.9.tgz#1e1488ff2f333683d374eccc8c0f4d5d851c6d3d" + integrity sha512-SSnyl/4ni/2ViHKkiZb8eajA/eN1DNFaHjhGiLUdZvDz6PKF4COSf/17xqSz64nOo2Ia29SA6B2KNCsyCbVmaQ== dependencies: "@open-draft/deferred-promise" "^2.2.0" "@open-draft/logger" "^0.3.0" "@open-draft/until" "^2.0.0" is-node-process "^1.2.0" - outvariant "^1.2.1" + outvariant "^1.4.3" strict-event-emitter "^0.5.1" "@nodelib/fs.scandir@2.1.5": @@ -1634,6 +1645,63 @@ ignore "^5.1.8" p-map "^4.0.0" +"@supabase/auth-js@2.65.0": + version "2.65.0" + resolved "https://registry.yarnpkg.com/@supabase/auth-js/-/auth-js-2.65.0.tgz#e345c492f8cbc31cd6289968eae0e349ff0f39e9" + integrity sha512-+wboHfZufAE2Y612OsKeVP4rVOeGZzzMLD/Ac3HrTQkkY4qXNjI6Af9gtmxwccE5nFvTiF114FEbIQ1hRq5uUw== + dependencies: + "@supabase/node-fetch" "^2.6.14" + +"@supabase/functions-js@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@supabase/functions-js/-/functions-js-2.4.1.tgz#373e75f8d3453bacd71fb64f88d7a341d7b53ad7" + integrity sha512-8sZ2ibwHlf+WkHDUZJUXqqmPvWQ3UHN0W30behOJngVh/qHHekhJLCFbh0AjkE9/FqqXtf9eoVvmYgfCLk5tNA== + dependencies: + "@supabase/node-fetch" "^2.6.14" + +"@supabase/node-fetch@2.6.15", "@supabase/node-fetch@^2.6.14": + version "2.6.15" + resolved 
"https://registry.yarnpkg.com/@supabase/node-fetch/-/node-fetch-2.6.15.tgz#731271430e276983191930816303c44159e7226c" + integrity sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ== + dependencies: + whatwg-url "^5.0.0" + +"@supabase/postgrest-js@1.16.1": + version "1.16.1" + resolved "https://registry.yarnpkg.com/@supabase/postgrest-js/-/postgrest-js-1.16.1.tgz#68dfa0581d8ae4296378cb8815bbde3f4602aef5" + integrity sha512-EOSEZFm5pPuCPGCmLF1VOCS78DfkSz600PBuvBND/IZmMciJ1pmsS3ss6TkB6UkuvTybYiBh7gKOYyxoEO3USA== + dependencies: + "@supabase/node-fetch" "^2.6.14" + +"@supabase/realtime-js@2.10.2": + version "2.10.2" + resolved "https://registry.yarnpkg.com/@supabase/realtime-js/-/realtime-js-2.10.2.tgz#c2b42d17d723d2d2a9146cfad61dc3df1ce3127e" + integrity sha512-qyCQaNg90HmJstsvr2aJNxK2zgoKh9ZZA8oqb7UT2LCh3mj9zpa3Iwu167AuyNxsxrUE8eEJ2yH6wLCij4EApA== + dependencies: + "@supabase/node-fetch" "^2.6.14" + "@types/phoenix" "^1.5.4" + "@types/ws" "^8.5.10" + ws "^8.14.2" + +"@supabase/storage-js@2.7.0": + version "2.7.0" + resolved "https://registry.yarnpkg.com/@supabase/storage-js/-/storage-js-2.7.0.tgz#9ff322d2c3b141087aa34115cf14205e4980ce75" + integrity sha512-iZenEdO6Mx9iTR6T7wC7sk6KKsoDPLq8rdu5VRy7+JiT1i8fnqfcOr6mfF2Eaqky9VQzhP8zZKQYjzozB65Rig== + dependencies: + "@supabase/node-fetch" "^2.6.14" + +"@supabase/supabase-js@^2.45.4": + version "2.45.4" + resolved "https://registry.yarnpkg.com/@supabase/supabase-js/-/supabase-js-2.45.4.tgz#0bcf8722f1732dfe3e4c5190d23e3938dcc689c3" + integrity sha512-E5p8/zOLaQ3a462MZnmnz03CrduA5ySH9hZyL03Y+QZLIOO4/Gs8Rdy4ZCKDHsN7x0xdanVEWWFN3pJFQr9/hg== + dependencies: + "@supabase/auth-js" "2.65.0" + "@supabase/functions-js" "2.4.1" + "@supabase/node-fetch" "2.6.15" + "@supabase/postgrest-js" "1.16.1" + "@supabase/realtime-js" "2.10.2" + "@supabase/storage-js" "2.7.0" + "@types/babel__core@^7.1.14": version "7.20.5" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017" @@ -1714,9 +1782,9 @@ pretty-format "^29.0.0" "@types/lodash@^4.14.172": - version "4.17.4" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.4.tgz#0303b64958ee070059e3a7184048a55159fe20b7" - integrity sha512-wYCP26ZLxaT3R39kiN2+HcJ4kTd3U1waI/cY7ivWYqFP6pW3ZNpvi6Wd6PHZx7T/t8z0vlkXMg3QYLa7DZ/IJQ== + version "4.17.10" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.10.tgz#64f3edf656af2fe59e7278b73d3e62404144a6e6" + integrity sha512-YpS0zzoduEhuOWjAotS6A5AVCva7X4lVlYLF0FYHAY9sdraBfnatttHItlWeZdGhuEkf+OzMNg2ZYAx8t+52uQ== "@types/md5@^2.3.0": version "2.3.5" @@ -1745,7 +1813,7 @@ dependencies: "@types/node" "*" -"@types/node@*", "@types/node@^20.12.13": +"@types/node@*": version "20.13.0" resolved "https://registry.yarnpkg.com/@types/node/-/node-20.13.0.tgz#011a76bc1e71ae9a026dddcfd7039084f752c4b6" integrity sha512-FM6AOb3khNkNIXPnHFDYaHerSv8uN22C91z098AnGccVu+Pcdhi+pNUFDi0iLmPIsVE0JBD0KVS7mzUYt4nRzQ== @@ -1766,6 +1834,18 @@ dependencies: undici-types "~5.26.4" +"@types/node@^22.5.5": + version "22.7.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.7.5.tgz#cfde981727a7ab3611a481510b473ae54442b92b" + integrity sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ== + dependencies: + undici-types "~6.19.2" + +"@types/phoenix@^1.5.4": + version "1.6.5" + resolved "https://registry.yarnpkg.com/@types/phoenix/-/phoenix-1.6.5.tgz#5654e14ec7ad25334a157a20015996b6d7d2075e" + integrity 
sha512-xegpDuR+z0UqG9fwHqNoy3rI7JDlvaPh2TY47Fl80oq6g+hXT+c/LEuE43X48clZ6lOfANl5WrPur9fYO1RJ/w== + "@types/pluralize@^0.0.29": version "0.0.29" resolved "https://registry.yarnpkg.com/@types/pluralize/-/pluralize-0.0.29.tgz#6ffa33ed1fc8813c469b859681d09707eb40d03c" @@ -1781,6 +1861,11 @@ resolved "https://registry.yarnpkg.com/@types/statuses/-/statuses-2.0.5.tgz#f61ab46d5352fd73c863a1ea4e1cef3b0b51ae63" integrity sha512-jmIUGWrAiwu3dZpxntxieC+1n/5c3mjrImkmOSQ2NC5uP6cYO4aAZDdSmRcI5C1oiTmqlZGHC+/NmJrKogbP5A== +"@types/tough-cookie@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.5.tgz#cb6e2a691b70cb177c6e3ae9c1d2e8b2ea8cd304" + integrity sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA== + "@types/uuid@^8.3.0": version "8.3.4" resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.4.tgz#bd86a43617df0594787d38b735f55c805becf1bc" @@ -1791,6 +1876,13 @@ resolved "https://registry.yarnpkg.com/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz#18b97a972f94f60a679fd5c796d96421b9abb9fd" integrity sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g== +"@types/ws@^8.5.10": + version "8.5.12" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.12.tgz#619475fe98f35ccca2a2f6c137702d85ec247b7e" + integrity sha512-3tPRkv1EtkDpzlgyKyI8pGsGZAGPEaXeu0DOj5DI25Ja91bdAYddYHbADRYVrZMRbfW+1l5YwXVDKohDJNQxkQ== + dependencies: + "@types/node" "*" + "@types/yargs-parser@*": version "21.0.3" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" @@ -2390,11 +2482,6 @@ cli-cursor@^4.0.0: dependencies: restore-cursor "^4.0.0" -cli-spinners@^2.9.2: - version "2.9.2" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" - integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== - cli-truncate@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-4.0.0.tgz#6cc28a2924fee9e25ce91e973db56c7066e6172a" @@ -3593,9 +3680,9 @@ graphemer@^1.4.0: integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== graphql@^16.8.1: - version "16.8.1" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.8.1.tgz#1930a965bef1170603702acdb68aedd3f3cf6f07" - integrity sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw== + version "16.9.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.9.0.tgz#1c310e63f16a49ce1fbb230bd0a000e99f6f115f" + integrity sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw== has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" @@ -4874,15 +4961,15 @@ ms@^2.0.0: integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== msw@^2.0.8: - version "2.3.1" - resolved "https://registry.yarnpkg.com/msw/-/msw-2.3.1.tgz#bfc73e256ffc2c74ec4381b604abb258df35f32b" - integrity sha512-ocgvBCLn/5l3jpl1lssIb3cniuACJLoOfZu01e3n5dbJrpA5PeeWn28jCLgQDNt6d7QT8tF2fYRzm9JoEHtiig== + version "2.4.10" + resolved "https://registry.yarnpkg.com/msw/-/msw-2.4.10.tgz#148d809f8e9dfd3c7d0abb93c1641bfaea877b9e" + integrity sha512-bDQh9b25JK4IKMs5hnamwAkcNZ9RwA4mR/4YcgWkzwHOxj7UICbVJfmChJvY1UCAAMraPpvjHdxjoUDpc3F+Qw== dependencies: "@bundled-es-modules/cookie" "^2.0.0" 
"@bundled-es-modules/statuses" "^1.0.1" + "@bundled-es-modules/tough-cookie" "^0.1.6" "@inquirer/confirm" "^3.0.0" - "@mswjs/cookies" "^1.1.0" - "@mswjs/interceptors" "^0.29.0" + "@mswjs/interceptors" "^0.35.8" "@open-draft/until" "^2.1.0" "@types/cookie" "^0.6.0" "@types/statuses" "^2.0.4" @@ -4891,7 +4978,7 @@ msw@^2.0.8: headers-polyfill "^4.0.2" is-node-process "^1.2.0" outvariant "^1.4.2" - path-to-regexp "^6.2.0" + path-to-regexp "^6.3.0" strict-event-emitter "^0.5.1" type-fest "^4.9.0" yargs "^17.7.2" @@ -5073,10 +5160,10 @@ optionator@^0.9.3: type-check "^0.4.0" word-wrap "^1.2.5" -outvariant@^1.2.1, outvariant@^1.4.0, outvariant@^1.4.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.2.tgz#f54f19240eeb7f15b28263d5147405752d8e2066" - integrity sha512-Ou3dJ6bA/UJ5GVHxah4LnqDwZRwAmWxrG3wtrHrbGnP4RnLCtA64A4F+ae7Y8ww660JaddSoArUR5HjipWSHAQ== +outvariant@^1.2.1, outvariant@^1.4.0, outvariant@^1.4.2, outvariant@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.3.tgz#221c1bfc093e8fec7075497e7799fdbf43d14873" + integrity sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA== p-limit@^2.2.0: version "2.3.0" @@ -5214,6 +5301,11 @@ path-to-regexp@^6.2.0: resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.2.tgz#324377a83e5049cbecadc5554d6a63a9a4866b36" integrity sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw== +path-to-regexp@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4" + integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ== + path-type@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" @@ -5339,7 +5431,12 @@ prompts@^2.0.1: kleur "^3.0.3" sisteransi "^1.0.5" -punycode@^2.1.0: +psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^2.1.0, punycode@^2.1.1: version "2.3.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== @@ -5356,6 +5453,11 @@ qs@6.11.2: dependencies: side-channel "^1.0.4" +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -5432,6 +5534,11 @@ require-from-string@^2.0.2: resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity 
sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" @@ -6000,6 +6107,16 @@ to-space-case@^1.0.0: dependencies: to-no-case "^1.0.0" +tough-cookie@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.4.tgz#945f1461b45b5a8c76821c33ea49c3ac192c1b36" + integrity sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" @@ -6056,9 +6173,9 @@ type-fest@^0.21.3: integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^4.9.0: - version "4.18.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.18.3.tgz#5249f96e7c2c3f0f1561625f54050e343f1c8f68" - integrity sha512-Q08/0IrpvM+NMY9PA2rti9Jb+JejTddwmwmVQGskAlhtcrw1wsRzoR6ode6mR+OAabNa75w/dxedSUY2mlphaQ== + version "4.26.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.26.1.tgz#a4a17fa314f976dd3e6d6675ef6c775c16d7955e" + integrity sha512-yOGpmOAL7CkKe/91I5O3gPICmJNLJ1G4zFYVAsRHg7M64biSnPtRj0WNQt++bRkjYOqjWXrhnUw1utzmVErAdg== typebox-validators@0.3.5: version "0.3.5" @@ -6143,6 +6260,11 @@ undici-types@~5.26.4: resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +undici-types@~6.19.2: + version "6.19.8" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" + integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== + undici@^5.28.2: version "5.28.4" resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" @@ -6179,6 +6301,11 @@ universal-user-agent@^6.0.0: resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + update-browserslist-db@^1.0.13: version "1.0.16" resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz#f6d489ed90fb2f07d67784eb3f53d7891f736356" @@ -6199,6 +6326,14 @@ url-join@4.0.1: resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7" integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA== +url-parse@^1.5.3: + version "1.5.10" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + 
util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -6406,6 +6541,11 @@ ws@^8.11.0: resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.0.tgz#d145d18eca2ed25aaf791a183903f7be5e295fea" integrity sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow== +ws@^8.14.2: + version "8.18.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" + integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== + xdg-basedir@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-5.1.0.tgz#1efba19425e73be1bc6f2a6ceb52a3d2c884c0c9" @@ -6469,6 +6609,11 @@ yocto-queue@^1.0.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== +yoctocolors-cjs@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz#f4b905a840a37506813a7acaa28febe97767a242" + integrity sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA== + youch@^3.2.2: version "3.3.3" resolved "https://registry.yarnpkg.com/youch/-/youch-3.3.3.tgz#50cfdf5bc395ce664a5073e31b712ff4a859d928" From dd4c33432a27914049cb76b30d3f6220591337ba Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Fri, 11 Oct 2024 23:31:38 -0400 Subject: [PATCH 64/72] fix: tests --- src/helpers/issue-fetching.ts | 2 +- src/helpers/issue-handling.ts | 4 ++-- src/helpers/issue.ts | 7 +------ 3 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 1484b80..5ad8ca2 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -86,7 +86,7 @@ export async function fetchLinkedIssues(params: FetchParams) { } for (const comment of comments) { - const foundIssues = idIssueFromComment(comment.body, params); + const foundIssues = idIssueFromComment(comment.body); const foundCodes = comment.body ? await fetchCodeLinkedFromIssue(comment.body, params.context, comment.issueUrl) : []; if (foundIssues) { diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 2239354..440926e 100644 --- a/src/helpers/issue-handling.ts +++ b/src/helpers/issue-handling.ts @@ -42,7 +42,7 @@ export async function handleSpec( streamlinedComments: Record ) { specAndBodies[key] = specOrBody; - const otherReferences = idIssueFromComment(specOrBody, params); + const otherReferences = idIssueFromComment(specOrBody); if (otherReferences) { for (const ref of otherReferences) { const anotherKey = createKey(ref.url, ref.issueNumber); @@ -87,7 +87,7 @@ export async function handleComment( streamlinedComments: Record, seen: Set ) { - const otherReferences = idIssueFromComment(comment.body, params); + const otherReferences = idIssueFromComment(comment.body); if (otherReferences) { for (const ref of otherReferences) { const key = createKey(ref.url); diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index 5266a9c..0bf2779 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -57,15 +57,10 @@ export function splitKey(key: string): [string, string, string] { * @param params - Additional parameters that may include context information. 
* @returns An array of linked issues or null if no issues are found. */ -export function idIssueFromComment(comment?: string | null, params?: FetchParams): LinkedIssues[] | null { +export function idIssueFromComment(comment?: string | null): LinkedIssues[] | null { const urlMatch = comment?.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+)\/(pull|issues?)\/(\d+)/g); const response: LinkedIssues[] = []; - //Check if valid issue is in the params - if (params && !(params.issueNum && params.owner && params.repo)) { - return null; - } - if (urlMatch) { urlMatch.forEach((url) => { response.push(createLinkedIssueOrPr(url)); From e37f585488e673bf88a03cb919bedb58fb5cb52b Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Fri, 11 Oct 2024 23:40:58 -0400 Subject: [PATCH 65/72] fix: tests and knip --- .github/knip.ts | 2 +- README.md | 2 +- src/handlers/ask-gpt.ts | 4 ++-- src/helpers/format-chat-history.ts | 17 ----------------- src/plugin.ts | 4 ++-- src/types/env.ts | 2 +- tests/main.test.ts | 2 +- 7 files changed, 8 insertions(+), 25 deletions(-) diff --git a/.github/knip.ts b/.github/knip.ts index 17857ad..3eb78f9 100644 --- a/.github/knip.ts +++ b/.github/knip.ts @@ -6,7 +6,7 @@ const config: KnipConfig = { ignore: ["src/types/config.ts", "**/__mocks__/**", "**/__fixtures__/**"], ignoreExportsUsedInFile: true, // eslint can also be safely ignored as per the docs: https://knip.dev/guides/handling-issues#eslint--jest - ignoreDependencies: ["eslint-config-prettier", "eslint-plugin-prettier", "@mswjs/data"], + ignoreDependencies: ["eslint-config-prettier", "eslint-plugin-prettier"], eslint: true, }; diff --git a/README.md b/README.md index e74704e..ae289dc 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,7 @@ plugins: ```sh # OpenAI API key OPENAI_API_KEY=your-api-key -UBIQUITY_OS_APP_SLUG="UbiquityOS" +UBIQUITY_OS_APP_NAME="UbiquityOS" ``` diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-gpt.ts index 9cb1b02..b9992ac 100644 --- a/src/handlers/ask-gpt.ts +++ b/src/handlers/ask-gpt.ts @@ -36,7 +36,7 @@ export async function askQuestion(context: Context, question: string) { **/ export async function askGpt(context: Context, question: string, formattedChat: string[]): Promise { const { - env: { UBIQUITY_OS_APP_SLUG }, + env: { UBIQUITY_OS_APP_NAME }, config: { model, similarityThreshold }, } = context; let similarComments: CommentSimilaritySearchResult[] = []; @@ -70,7 +70,7 @@ export async function askGpt(context: Context, question: string, formattedChat: rerankedText, formattedChat, ["typescript", "github", "cloudflare worker", "actions", "jest", "supabase", "openai"], - UBIQUITY_OS_APP_SLUG + UBIQUITY_OS_APP_NAME ); } diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 8bc115f..2d01711 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -1,4 +1,3 @@ -import { ChatCompletionMessageParam } from "openai/resources"; import { Context } from "../types"; import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; import { createKey, streamlineComments } from "../handlers/comments"; @@ -156,19 +155,3 @@ function createComment(comment: StreamlinedComments) { function createSpecOrBody(specOrBody: string) { return `${specOrBody}\n`; } - -/** - * Creates a chat history array from the formatted chat string. - * - * @param formattedChat - The formatted chat string. - * @returns An array of ChatCompletionMessageParam objects representing the chat history. 
- */ -export function createChatHistory(formattedChat: string) { - const chatHistory: ChatCompletionMessageParam[] = []; - const userMessage: ChatCompletionMessageParam = { - role: "user", - content: formattedChat, - }; - chatHistory.push(userMessage); - return chatHistory; -} diff --git a/src/plugin.ts b/src/plugin.ts index 9b93a5a..c74a00f 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -38,10 +38,10 @@ export async function plugin(inputs: PluginInputs, env: Env) { export async function runPlugin(context: Context) { const { logger, - env: { UBIQUITY_OS_APP_SLUG }, + env: { UBIQUITY_OS_APP_NAME }, } = context; const question = context.payload.comment.body; - const slugRegex = new RegExp(`@${UBIQUITY_OS_APP_SLUG} `, "gi"); + const slugRegex = new RegExp(`@${UBIQUITY_OS_APP_NAME} `, "gi"); if (!question.match(slugRegex)) { logger.info("Comment does not mention the app. Skipping."); return; diff --git a/src/types/env.ts b/src/types/env.ts index a2d9343..a378990 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -13,7 +13,7 @@ dotenv.config(); */ export const envSchema = T.Object({ OPENAI_API_KEY: T.String(), - UBIQUITY_OS_APP_SLUG: T.String(), + UBIQUITY_OS_APP_NAME: T.String(), VOYAGEAI_API_KEY: T.String(), SUPABASE_URL: T.String(), SUPABASE_KEY: T.String(), diff --git a/tests/main.test.ts b/tests/main.test.ts index 86afb27..51ca676 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -270,7 +270,7 @@ function createContext(body = TEST_SLASH_COMMAND) { logger: new Logs("debug"), config: {}, env: { - UBIQUITY_OS_APP_SLUG: "UbiquityOS", + UBIQUITY_OS_APP_NAME: "UbiquityOS", OPENAI_API_KEY: "test", }, adapters: { From a55fb00de9af5d0f16dfe749dcadd83c1205f437 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E3=82=A2=E3=83=AC=E3=82=AF=E3=82=B5=E3=83=B3=E3=83=80?= =?UTF-8?q?=E3=83=BC=2Eeth?= <4975670+0x4007@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:15:38 +0900 Subject: [PATCH 66/72] Update package.json --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 66b7373..f12199a 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "@ubiquity-os/command-ask", "version": "1.0.0", "description": "A highly context aware organization integrated chatbot", - "author": "Ubiquity OS", + "author": "Ubiquity DAO", "license": "MIT", "main": "src/worker.ts", "engines": { From 0f00dd4d62fbff74ca315ab4c97e975a30213f0a Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Thu, 17 Oct 2024 01:10:18 -0400 Subject: [PATCH 67/72] fix: type rename and add tsx --- package.json | 1 + src/adapters/openai/helpers/completions.ts | 39 ----- src/handlers/{ask-gpt.ts => ask-llm.ts} | 0 src/handlers/comments.ts | 4 +- src/helpers/format-chat-history.ts | 2 +- src/helpers/issue-fetching.ts | 6 +- src/helpers/issue-handling.ts | 4 +- src/helpers/issue.ts | 22 +-- src/plugin.ts | 2 +- src/types/env.ts | 2 +- src/types/{github.ts => github-types.d.ts} | 0 src/types/{gpt.ts => llm.d.ts} | 0 tests/main.test.ts | 2 +- yarn.lock | 168 ++++++++++++++++++++- 14 files changed, 182 insertions(+), 70 deletions(-) rename src/handlers/{ask-gpt.ts => ask-llm.ts} (100%) rename src/types/{github.ts => github-types.d.ts} (100%) rename src/types/{gpt.ts => llm.d.ts} (100%) diff --git a/package.json b/package.json index f12199a..31e77a6 100644 --- a/package.json +++ b/package.json @@ -63,6 +63,7 @@ "npm-run-all": "4.1.5", "prettier": "3.3.2", "ts-jest": "29.1.5", + "tsx": "4.15.6", "typescript": "5.4.5", "typescript-eslint": "7.13.1", "wrangler": 
"3.60.3" diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index b65d67b..af9a565 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -77,43 +77,4 @@ export class Completions extends SuperOpenAi { } return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } }; } - - async contextCompressionCalls(context: string[]): Promise { - const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({ - model: "mistralai/mistral-nemo", - messages: [ - { - role: "system", - content: [ - { - type: "text", - text: "You are a LLM responsible for compression the context for better processing, do not leave anything out", - }, - ], - }, - { - role: "user", - content: [ - { - type: "text", - text: context.join("\n"), - }, - ], - }, - ], - temperature: 0.2, - max_tokens: 300, - top_p: 1, - frequency_penalty: 0, - presence_penalty: 0, - response_format: { - type: "text", - }, - }); - const answer = res.choices[0].message; - if (answer && answer.content && res.usage) { - return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } }; - } - return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } }; - } } diff --git a/src/handlers/ask-gpt.ts b/src/handlers/ask-llm.ts similarity index 100% rename from src/handlers/ask-gpt.ts rename to src/handlers/ask-llm.ts diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts index 1cd522e..cf12053 100644 --- a/src/handlers/comments.ts +++ b/src/handlers/comments.ts @@ -1,6 +1,6 @@ import { splitKey } from "../helpers/issue"; -import { LinkedIssues, SimplifiedComment } from "../types/github"; -import { StreamlinedComment } from "../types/gpt"; +import { LinkedIssues, SimplifiedComment } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; /** * Get all streamlined comments from linked issues diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts index 2d01711..5c92c63 100644 --- a/src/helpers/format-chat-history.ts +++ b/src/helpers/format-chat-history.ts @@ -1,5 +1,5 @@ import { Context } from "../types"; -import { StreamlinedComment, StreamlinedComments } from "../types/gpt"; +import { StreamlinedComment, StreamlinedComments } from "../types/llm"; import { createKey, streamlineComments } from "../handlers/comments"; import { fetchPullRequestDiff, fetchIssue, fetchIssueComments } from "./issue-fetching"; import { splitKey } from "./issue"; diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 5ad8ca2..5af306e 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -1,8 +1,8 @@ import { createKey, getAllStreamlinedComments } from "../handlers/comments"; import { Context } from "../types"; -import { IssueWithUser, SimplifiedComment, User } from "../types/github"; -import { FetchParams, Issue, Comments, LinkedIssues } from "../types/github"; -import { StreamlinedComment } from "../types/gpt"; +import { IssueWithUser, SimplifiedComment, User } from "../types/github-types"; +import { FetchParams, Issue, Comments, LinkedIssues } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; import { dedupeStreamlinedComments, fetchCodeLinkedFromIssue, diff --git a/src/helpers/issue-handling.ts b/src/helpers/issue-handling.ts index 440926e..3f44225 100644 --- a/src/helpers/issue-handling.ts +++ 
b/src/helpers/issue-handling.ts @@ -1,6 +1,6 @@ import { createKey } from "../handlers/comments"; -import { FetchParams } from "../types/github"; -import { StreamlinedComment } from "../types/gpt"; +import { FetchParams } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; import { idIssueFromComment, mergeStreamlinedComments, splitKey } from "./issue"; import { fetchLinkedIssues, fetchIssue, fetchAndHandleIssue, mergeCommentsAndFetchSpec } from "./issue-fetching"; diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index 0bf2779..69dc4dd 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -1,6 +1,6 @@ import { createKey } from "../handlers/comments"; -import { FetchedCodes, FetchParams, LinkedIssues } from "../types/github"; -import { StreamlinedComment } from "../types/gpt"; +import { FetchedCodes, FetchParams, LinkedIssues } from "../types/github-types"; +import { StreamlinedComment } from "../types/llm"; import { Context } from "../types/context"; // Import Context type /** @@ -66,22 +66,6 @@ export function idIssueFromComment(comment?: string | null): LinkedIssues[] | nu response.push(createLinkedIssueOrPr(url)); }); } - // This section handles issue references using markdown format (e.g., #123) - // const hashMatch = comment?.match(/#(\d+)/g); - // if (hashMatch) { - // const owner = params?.context.payload.repository?.owner?.login || ""; - // const repo = params?.context.payload.repository?.name || ""; - - // hashMatch.forEach((hash) => { - // const issueNumber = hash.replace("#", ""); - // response.push({ - // owner, - // repo, - // issueNumber: parseInt(issueNumber, 10), - // url: `https://github.com/${owner}/${repo}/issues/${issueNumber}` - // }); - // }); - // } return response.length > 0 ? response : null; } @@ -246,7 +230,7 @@ export function optimizeContext(strings: string[]): string[] { * @returns The content of the README file as a string. 
*/ export async function pullReadmeFromRepoForIssue(params: FetchParams): Promise { - let readme = undefined; + let readme; try { const response = await params.context.octokit.repos.getContent({ owner: params.context.payload.repository.owner?.login || params.context.payload.organization?.login || "", diff --git a/src/plugin.ts b/src/plugin.ts index c74a00f..a886a4b 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,7 +1,7 @@ import { Octokit } from "@octokit/rest"; import { PluginInputs } from "./types"; import { Context } from "./types"; -import { askQuestion } from "./handlers/ask-gpt"; +import { askQuestion } from "./handlers/ask-llm"; import { addCommentToIssue } from "./handlers/add-comment"; import { LogLevel, LogReturn, Logs } from "@ubiquity-dao/ubiquibot-logger"; import { Env } from "./types/env"; diff --git a/src/types/env.ts b/src/types/env.ts index a378990..d548e9d 100644 --- a/src/types/env.ts +++ b/src/types/env.ts @@ -13,7 +13,7 @@ dotenv.config(); */ export const envSchema = T.Object({ OPENAI_API_KEY: T.String(), - UBIQUITY_OS_APP_NAME: T.String(), + UBIQUITY_OS_APP_NAME: T.String({ default: "UbiquityOS" }), VOYAGEAI_API_KEY: T.String(), SUPABASE_URL: T.String(), SUPABASE_KEY: T.String(), diff --git a/src/types/github.ts b/src/types/github-types.d.ts similarity index 100% rename from src/types/github.ts rename to src/types/github-types.d.ts diff --git a/src/types/gpt.ts b/src/types/llm.d.ts similarity index 100% rename from src/types/gpt.ts rename to src/types/llm.d.ts diff --git a/tests/main.test.ts b/tests/main.test.ts index 51ca676..5cf9b96 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -7,7 +7,7 @@ import { Context, SupportedEventsU } from "../src/types"; import { drop } from "@mswjs/data"; import issueTemplate from "./__mocks__/issue-template"; import repoTemplate from "./__mocks__/repo-template"; -import { askQuestion } from "../src/handlers/ask-gpt"; +import { askQuestion } from "../src/handlers/ask-llm"; import { runPlugin } from "../src/plugin"; import { TransformDecodeCheckError, Value } from "@sinclair/typebox/value"; import { envSchema } from "../src/types/env"; diff --git a/yarn.lock b/yarn.lock index d8c7ed1..a5fea8f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -928,116 +928,231 @@ escape-string-regexp "^4.0.0" rollup-plugin-node-polyfills "^0.2.1" +"@esbuild/aix-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" + integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== + "@esbuild/android-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd" integrity sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA== +"@esbuild/android-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" + integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== + "@esbuild/android-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d" integrity sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A== +"@esbuild/android-arm@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" + integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== + "@esbuild/android-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1" integrity sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww== +"@esbuild/android-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" + integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== + "@esbuild/darwin-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276" integrity sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg== +"@esbuild/darwin-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" + integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== + "@esbuild/darwin-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb" integrity sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw== +"@esbuild/darwin-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" + integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== + "@esbuild/freebsd-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2" integrity sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ== +"@esbuild/freebsd-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" + integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== + "@esbuild/freebsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4" integrity sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ== +"@esbuild/freebsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" + integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== + "@esbuild/linux-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb" integrity sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg== +"@esbuild/linux-arm64@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" + integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== + "@esbuild/linux-arm@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a" integrity sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA== +"@esbuild/linux-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" + integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== + "@esbuild/linux-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a" integrity sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ== +"@esbuild/linux-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" + integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== + "@esbuild/linux-loong64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72" integrity sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ== +"@esbuild/linux-loong64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" + integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== + "@esbuild/linux-mips64el@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289" integrity sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A== +"@esbuild/linux-mips64el@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" + integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== + "@esbuild/linux-ppc64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7" integrity sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg== +"@esbuild/linux-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" + integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== + "@esbuild/linux-riscv64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09" integrity sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA== +"@esbuild/linux-riscv64@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" + integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== + "@esbuild/linux-s390x@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829" integrity sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q== +"@esbuild/linux-s390x@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" + integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== + "@esbuild/linux-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4" integrity sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw== +"@esbuild/linux-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" + integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== + "@esbuild/netbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462" integrity sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q== +"@esbuild/netbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" + integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== + "@esbuild/openbsd-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691" integrity sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g== +"@esbuild/openbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" + integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== + "@esbuild/sunos-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273" integrity sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg== +"@esbuild/sunos-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" + integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== + "@esbuild/win32-arm64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f" integrity sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag== +"@esbuild/win32-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" + integrity 
sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== + "@esbuild/win32-ia32@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03" integrity sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw== +"@esbuild/win32-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" + integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== + "@esbuild/win32-x64@0.17.19": version "0.17.19" resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061" integrity sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA== +"@esbuild/win32-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" + integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== + "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -3123,6 +3238,35 @@ esbuild@0.17.19: "@esbuild/win32-ia32" "0.17.19" "@esbuild/win32-x64" "0.17.19" +esbuild@~0.21.4: + version "0.21.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" + integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw== + optionalDependencies: + "@esbuild/aix-ppc64" "0.21.5" + "@esbuild/android-arm" "0.21.5" + "@esbuild/android-arm64" "0.21.5" + "@esbuild/android-x64" "0.21.5" + "@esbuild/darwin-arm64" "0.21.5" + "@esbuild/darwin-x64" "0.21.5" + "@esbuild/freebsd-arm64" "0.21.5" + "@esbuild/freebsd-x64" "0.21.5" + "@esbuild/linux-arm" "0.21.5" + "@esbuild/linux-arm64" "0.21.5" + "@esbuild/linux-ia32" "0.21.5" + "@esbuild/linux-loong64" "0.21.5" + "@esbuild/linux-mips64el" "0.21.5" + "@esbuild/linux-ppc64" "0.21.5" + "@esbuild/linux-riscv64" "0.21.5" + "@esbuild/linux-s390x" "0.21.5" + "@esbuild/linux-x64" "0.21.5" + "@esbuild/netbsd-x64" "0.21.5" + "@esbuild/openbsd-x64" "0.21.5" + "@esbuild/sunos-x64" "0.21.5" + "@esbuild/win32-arm64" "0.21.5" + "@esbuild/win32-ia32" "0.21.5" + "@esbuild/win32-x64" "0.21.5" + escalade@^3.1.1, escalade@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27" @@ -3492,7 +3636,7 @@ fs.realpath@^1.0.0: resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -fsevents@^2.3.2, fsevents@~2.3.2: +fsevents@^2.3.2, fsevents@~2.3.2, fsevents@~2.3.3: version "2.3.3" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== @@ -3585,6 +3729,13 @@ get-symbol-description@^1.0.2: es-errors "^1.3.0" get-intrinsic "^1.2.4" +get-tsconfig@^4.7.5: + version "4.8.1" + resolved 
"https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.8.1.tgz#8995eb391ae6e1638d251118c7b56de7eb425471" + integrity sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg== + dependencies: + resolve-pkg-maps "^1.0.0" + git-raw-commits@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/git-raw-commits/-/git-raw-commits-4.0.0.tgz#b212fd2bff9726d27c1283a1157e829490593285" @@ -5556,6 +5707,11 @@ resolve-from@^5.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== +resolve-pkg-maps@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f" + integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw== + resolve.exports@^2.0.0, resolve.exports@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" @@ -6155,6 +6311,16 @@ tslib@^2.2.0, tslib@^2.6.2: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== +tsx@4.15.6: + version "4.15.6" + resolved "https://registry.yarnpkg.com/tsx/-/tsx-4.15.6.tgz#4522ed093f7fa54f031a7a999274e8b35dbf3165" + integrity sha512-is0VQQlfNZRHEuSSTKA6m4xw74IU4AizmuB6lAYLRt9XtuyeQnyJYexhNZOPCB59SqC4JzmSzPnHGBXxf3k0hA== + dependencies: + esbuild "~0.21.4" + get-tsconfig "^4.7.5" + optionalDependencies: + fsevents "~2.3.3" + type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" From 6c7f136043b2de6abe901d15dbd41ef93bcae229 Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Thu, 17 Oct 2024 01:21:31 -0400 Subject: [PATCH 68/72] fix: knip --- .github/knip.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/knip.ts b/.github/knip.ts index 3eb78f9..2ca6fdf 100644 --- a/.github/knip.ts +++ b/.github/knip.ts @@ -6,7 +6,7 @@ const config: KnipConfig = { ignore: ["src/types/config.ts", "**/__mocks__/**", "**/__fixtures__/**"], ignoreExportsUsedInFile: true, // eslint can also be safely ignored as per the docs: https://knip.dev/guides/handling-issues#eslint--jest - ignoreDependencies: ["eslint-config-prettier", "eslint-plugin-prettier"], + ignoreDependencies: ["eslint-config-prettier", "eslint-plugin-prettier", "tsx"], eslint: true, }; From 958328673c11aac72d5c59b5d7ca8fab9d78ad35 Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Thu, 17 Oct 2024 01:58:26 -0400 Subject: [PATCH 69/72] feat: added instruction to the embedding --- src/adapters/openai/helpers/completions.ts | 1 - src/adapters/supabase/helpers/comment.ts | 2 +- src/adapters/supabase/helpers/issues.ts | 2 +- src/adapters/voyage/helpers/embedding.ts | 5 +++-- src/handlers/ask-llm.ts | 2 -- src/helpers/issue-fetching.ts | 1 - src/helpers/issue.ts | 1 - src/plugin.ts | 1 - 8 files changed, 5 insertions(+), 10 deletions(-) diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts index af9a565..f68f305 100644 --- a/src/adapters/openai/helpers/completions.ts +++ b/src/adapters/openai/helpers/completions.ts @@ -71,7 
+71,6 @@ export class Completions extends SuperOpenAi { }, }); const answer = res.choices[0].message; - console.log(JSON.stringify(res, null, 2)); if (answer && answer.content && res.usage) { return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } }; } diff --git a/src/adapters/supabase/helpers/comment.ts b/src/adapters/supabase/helpers/comment.ts index e6aff31..d09f24a 100644 --- a/src/adapters/supabase/helpers/comment.ts +++ b/src/adapters/supabase/helpers/comment.ts @@ -33,7 +33,7 @@ export class Comment extends SuperSupabase { } async findSimilarComments(query: string, threshold: number, currentId: string): Promise { - const embedding = await this.context.adapters.voyage.embedding.createEmbedding(query); + const embedding = await this.context.adapters.voyage.embedding.createEmbedding({ text: query, prompt: "This is a query for the stored documents:" }); const { data, error } = await this.supabase.rpc("find_similar_comments", { current_id: currentId, query_text: query, diff --git a/src/adapters/supabase/helpers/issues.ts b/src/adapters/supabase/helpers/issues.ts index 8bd083e..142ef02 100644 --- a/src/adapters/supabase/helpers/issues.ts +++ b/src/adapters/supabase/helpers/issues.ts @@ -33,7 +33,7 @@ export class Issue extends SuperSupabase { return data; } async findSimilarIssues(plaintext: string, threshold: number, currentId: string): Promise { - const embedding = await this.context.adapters.voyage.embedding.createEmbedding(plaintext); + const embedding = await this.context.adapters.voyage.embedding.createEmbedding({ text: plaintext, prompt: "This is a query for the stored documents:" }); const { data, error } = await this.supabase.rpc("find_similar_issue_ftse", { current_id: currentId, query_text: plaintext, diff --git a/src/adapters/voyage/helpers/embedding.ts b/src/adapters/voyage/helpers/embedding.ts index 575543e..68797e2 100644 --- a/src/adapters/voyage/helpers/embedding.ts +++ b/src/adapters/voyage/helpers/embedding.ts @@ -11,12 +11,13 @@ export class Embedding extends SuperVoyage { this.context = context; } - async createEmbedding(text: string | null): Promise { + async createEmbedding(input: { text?: string; prompt?: string } = {}): Promise { + const { text = null, prompt = null } = input; if (text === null) { return new Array(VECTOR_SIZE).fill(0); } else { const response = await this.client.embed({ - input: text, + input: prompt ? 
`${prompt} ${text}` : text, model: "voyage-large-2-instruct", }); return (response.data && response.data[0]?.embedding) || []; diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index b9992ac..9a01ac1 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -23,7 +23,6 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - console.log("formattedChat", formattedChat); return await askGpt(context, question, formattedChat); } @@ -56,7 +55,6 @@ export async function askGpt(context: Context, question: string, formattedChat: // Remove Null Results (Private Comments) similarText = similarText.filter((text) => text !== null); formattedChat = formattedChat.filter((text) => text !== null); - context.logger.info(formattedChat.join("")); // Optimize the context formattedChat = optimizeContext(formattedChat); // ReRank the results based on the question diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts index 5af306e..486d83e 100644 --- a/src/helpers/issue-fetching.ts +++ b/src/helpers/issue-fetching.ts @@ -44,7 +44,6 @@ export async function fetchLinkedIssues(params: FetchParams) { throw new Error("Issue body or URL not found"); } - console.log(params, params.owner, params.repo); if (!params.owner || !params.repo) { throw new Error("Owner, repo, or issue number not found"); } diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts index 69dc4dd..2c76179 100644 --- a/src/helpers/issue.ts +++ b/src/helpers/issue.ts @@ -126,7 +126,6 @@ export async function fetchCodeLinkedFromIssue( let parsedUrl = parseGitHubUrl(url); parsedUrl = parsedUrl ? { ...parsedUrl, path: removeLineNumbers(parsedUrl.path) } : null; if (!parsedUrl || !hasValidExtension(parsedUrl.path)) return null; - console.log(`Fetching content from ${url}`); try { //Parse the commit sha from the URL const commitSha = url.match(/https?:\/\/github\.com\/[^/]+\/[^/]+\/blob\/([^/]+)\/.+/); diff --git a/src/plugin.ts b/src/plugin.ts index a886a4b..8eab234 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -13,7 +13,6 @@ import OpenAI from "openai"; export async function plugin(inputs: PluginInputs, env: Env) { const octokit = new Octokit({ auth: inputs.authToken }); const supabase = createClient(env.SUPABASE_URL, env.SUPABASE_KEY); - console.log("inputs", env); const voyageClient = new VoyageAIClient({ apiKey: env.VOYAGEAI_API_KEY, }); From baec08a95be7349a69ef6b043cbb4fd6b3b80943 Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Thu, 17 Oct 2024 02:17:20 -0400 Subject: [PATCH 70/72] fix: tests --- src/handlers/ask-llm.ts | 14 +------------- tests/main.test.ts | 4 ++-- 2 files changed, 3 insertions(+), 15 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index 9a01ac1..fa8a883 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -23,6 +23,7 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); + context.logger.info(`${formattedChat}`); return await askGpt(context, question, formattedChat); } @@ -61,7 +62,6 @@ export async function askGpt(context: Context, question: string, formattedChat: // const reRankedChat = formattedChat.length > 0 ? 
await context.adapters.voyage.reranker.reRankResults(formattedChat.filter(text => text !== ""), question, 300) : []; similarText = similarText.filter((text) => text !== ""); const rerankedText = similarText.length > 0 ? await context.adapters.voyage.reranker.reRankResults(similarText, question) : []; - rerankedText.forEach((text) => removeUnwantedChars(text)); return context.adapters.openai.completions.createCompletion( question, model, @@ -71,15 +71,3 @@ export async function askGpt(context: Context, question: string, formattedChat: UBIQUITY_OS_APP_NAME ); } - -/** - * Removes unwanted characters from the text like emojis, special characters etc. - * @param text - * @returns - */ -function removeUnwantedChars(text: string): string { - if (!text) { - return ""; - } - return text.replace(/[^a-zA-Z0-9\s]/g, ""); -} diff --git a/tests/main.test.ts b/tests/main.test.ts index 5cf9b96..29882e6 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -145,7 +145,7 @@ describe("Ask plugin tests", () => { 2 ubiquity: ${TEST_QUESTION} [#1](https://www.github.com/ubiquity/test-repo/issues/1) === End Current Issue #1 Conversation === - === Linked Issue #2 Specification === ubiquity/test-repo/2 === + ,=== Linked Issue #2 Specification === ubiquity/test-repo/2 === Related to issue #3 === End Linked Issue #2 Specification === @@ -155,7 +155,7 @@ describe("Ask plugin tests", () => { 3 ubiquity: ${ISSUE_ID_3_CONTENT} [#3](https://www.github.com/ubiquity/test-repo/issues/3) === End Linked Issue #2 Conversation === - === Linked Issue #3 Specification === ubiquity/test-repo/3 === + ,=== Linked Issue #3 Specification === ubiquity/test-repo/3 === Just another issue === End Linked Issue #3 Specification === From 1edbd217430ad490e5122707914aaf08d88647ab Mon Sep 17 00:00:00 2001 From: Shivaditya Shivganesh Date: Thu, 17 Oct 2024 02:20:38 -0400 Subject: [PATCH 71/72] fix: inverted the scale on similarity threshold --- src/handlers/ask-llm.ts | 4 ++-- src/types/plugin-inputs.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index fa8a883..cf6aeae 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -42,12 +42,12 @@ export async function askGpt(context: Context, question: string, formattedChat: let similarComments: CommentSimilaritySearchResult[] = []; let similarIssues: IssueSimilaritySearchResult[] = []; try { - similarComments = (await context.adapters.supabase.comment.findSimilarComments(question, similarityThreshold, "")) || []; + similarComments = (await context.adapters.supabase.comment.findSimilarComments(question, 1 - similarityThreshold, "")) || []; } catch (error) { context.logger.error(`Error fetching similar comments: ${(error as Error).message}`); } try { - similarIssues = (await context.adapters.supabase.issue.findSimilarIssues(question, similarityThreshold, "")) || []; + similarIssues = (await context.adapters.supabase.issue.findSimilarIssues(question, 1 - similarityThreshold, "")) || []; } catch (error) { context.logger.error(`Error fetching similar issues: ${(error as Error).message}`); } diff --git a/src/types/plugin-inputs.ts b/src/types/plugin-inputs.ts index 694bb50..a98f0be 100644 --- a/src/types/plugin-inputs.ts +++ b/src/types/plugin-inputs.ts @@ -22,7 +22,7 @@ export interface PluginInputs Date: Thu, 17 Oct 2024 02:31:24 -0400 Subject: [PATCH 72/72] fix: removed jest commas --- src/handlers/ask-llm.ts | 2 +- tests/main.test.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts index cf6aeae..30112c3 100644 --- a/src/handlers/ask-llm.ts +++ b/src/handlers/ask-llm.ts @@ -23,7 +23,7 @@ export async function askQuestion(context: Context, question: string) { repo: context.payload.repository.name, }); const formattedChat = await formatChatHistory(context, streamlinedComments, specAndBodies); - context.logger.info(`${formattedChat}`); + context.logger.info(`${formattedChat.join("")}`); return await askGpt(context, question, formattedChat); } diff --git a/tests/main.test.ts b/tests/main.test.ts index 29882e6..9875f81 100644 --- a/tests/main.test.ts +++ b/tests/main.test.ts @@ -145,7 +145,7 @@ describe("Ask plugin tests", () => { 2 ubiquity: ${TEST_QUESTION} [#1](https://www.github.com/ubiquity/test-repo/issues/1) === End Current Issue #1 Conversation === - ,=== Linked Issue #2 Specification === ubiquity/test-repo/2 === + === Linked Issue #2 Specification === ubiquity/test-repo/2 === Related to issue #3 === End Linked Issue #2 Specification === @@ -155,7 +155,7 @@ describe("Ask plugin tests", () => { 3 ubiquity: ${ISSUE_ID_3_CONTENT} [#3](https://www.github.com/ubiquity/test-repo/issues/3) === End Linked Issue #2 Conversation === - ,=== Linked Issue #3 Specification === ubiquity/test-repo/3 === + === Linked Issue #3 Specification === ubiquity/test-repo/3 === Just another issue === End Linked Issue #3 Specification ===