Skip to content

Commit

Permalink
chore: embed groundTruths in html comment
Browse files Browse the repository at this point in the history
  • Loading branch information
Keyrxng committed Oct 25, 2024
1 parent 9615b00 commit 29177f5
Show file tree
Hide file tree
Showing 3 changed files with 48 additions and 11 deletions.
9 changes: 7 additions & 2 deletions src/adapters/openai/helpers/completions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ const MAX_TOKENS = 7000;

export interface CompletionsType {
answer: string;
groundTruths: string[];
tokenUsage: {
input: number;
output: number;
Expand Down Expand Up @@ -73,9 +74,13 @@ export class Completions extends SuperOpenAi {
});
const answer = res.choices[0].message;
if (answer && answer.content && res.usage) {
return { answer: answer.content, tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens } };
return {
answer: answer.content,
groundTruths,
tokenUsage: { input: res.usage.prompt_tokens, output: res.usage.completion_tokens, total: res.usage.total_tokens },
};
}
return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 } };
return { answer: "", tokenUsage: { input: 0, output: 0, total: 0 }, groundTruths };
}

async createGroundTruthCompletion<TApp extends ModelApplications>(
Expand Down
39 changes: 36 additions & 3 deletions src/plugin.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,19 @@ export async function runPlugin(context: Context) {
let commentToPost;
try {
const response = await askQuestion(context, question);
const { answer, tokenUsage } = response;
const { answer, tokenUsage, groundTruths } = response;
if (!answer) {
throw logger.error(`No answer from OpenAI`);
}
logger.info(`Answer: ${answer}`, { tokenUsage });
const tokens = `\n\n<!--\n${JSON.stringify(tokenUsage, null, 2)}\n--!>`;
commentToPost = answer + tokens;

const metadata = {
groundTruths,
tokenUsage,
};

const metadataString = createStructuredMetadata("LLM Ground Truths and Token Usage", logger.info(`Answer: ${answer}`, { metadata }));
commentToPost = answer + metadataString;
} catch (err) {
let errorMessage;
if (err instanceof LogReturn) {
Expand All @@ -81,3 +87,30 @@ export async function runPlugin(context: Context) {
/**
 * Serializes log metadata as pretty-printed JSON and escapes the characters
 * that could break out of an enclosing HTML comment (`<`, `>`, `--`).
 */
function sanitizeMetadata(obj: LogReturn["metadata"]): string {
  const pretty = JSON.stringify(obj, null, 2);
  const angleEscaped = pretty.replace(/</g, "&lt;").replace(/>/g, "&gt;");
  // "--" would prematurely terminate an HTML comment, so encode the hyphens too.
  return angleEscaped.replace(/--/g, "&#45;&#45;");
}

/**
 * Builds the structured-metadata footer appended to posted comments.
 *
 * The metadata is embedded inside an HTML comment so it is machine-readable
 * but invisible to users; fatal log entries additionally render it as a
 * visible JSON code block so the failure context shows up in the comment.
 *
 * @param header label placed inside the opening HTML comment marker
 * @param logReturn log entry whose message type and metadata are serialized
 * @returns the serialized metadata block to append to the comment body
 */
function createStructuredMetadata(header: string | undefined, logReturn: LogReturn) {
  let logMessage: LogReturn["logMessage"] | undefined;
  let metadata: LogReturn["metadata"] | undefined;
  if (logReturn) {
    logMessage = logReturn.logMessage;
    metadata = logReturn.metadata;
  }

  // Guard against undefined metadata: JSON.stringify(undefined) returns the
  // value undefined (not a string), so sanitizeMetadata would throw on it.
  const jsonPretty = metadata ? sanitizeMetadata(metadata) : "{}";
  const stackLine = new Error().stack?.split("\n")[2] ?? "";
  const caller = stackLine.match(/at (\S+)/)?.[1] ?? "";
  // Avoid interpolating the literal string "undefined" when revision is absent.
  const revision = metadata?.revision ?? "";
  const ubiquityMetadataHeader = `\n\n<!-- Ubiquity - ${header} - ${caller} - ${revision}`;

  let metadataSerialized: string;
  const metadataSerializedVisible = ["```json", jsonPretty, "```"].join("\n");
  const metadataSerializedHidden = [ubiquityMetadataHeader, jsonPretty, "-->"].join("\n");

  if (logMessage?.type === "fatal") {
    // if the log message is fatal, then we want to show the metadata
    metadataSerialized = [metadataSerializedVisible, metadataSerializedHidden].join("\n");
  } else {
    // otherwise we want to hide it
    metadataSerialized = metadataSerializedHidden;
  }

  return metadataSerialized;
}
11 changes: 5 additions & 6 deletions tests/main.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ const TEST_SLASH_COMMAND = "@UbiquityOS what is pi?";
const LOG_CALLER = "_Logs.<anonymous>";
const ISSUE_ID_2_CONTENT = "More context here #2";
const ISSUE_ID_3_CONTENT = "More context here #3";
const MOCK_ANSWER = "This is a mock answer for the chat";

type Comment = {
id: number;
Expand Down Expand Up @@ -61,7 +62,7 @@ describe("Ask plugin tests", () => {

expect(res).toBeDefined();

expect(res?.answer).toBe("This is a mock answer for the chat");
expect(res?.answer).toBe(MOCK_ANSWER);
});

it("should not ask GPT a question if comment is from a bot", async () => {
Expand Down Expand Up @@ -106,7 +107,6 @@ describe("Ask plugin tests", () => {
createComments([transformCommentTemplate(1, 1, TEST_QUESTION, "ubiquity", "test-repo", true)]);
await runPlugin(ctx);

expect(infoSpy).toHaveBeenCalledTimes(3);
expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`);
expect(infoSpy).toHaveBeenNthCalledWith(3, "Answer: This is a mock answer for the chat", {
caller: LOG_CALLER,
Expand All @@ -130,8 +130,6 @@ describe("Ask plugin tests", () => {

await runPlugin(ctx);

expect(infoSpy).toHaveBeenCalledTimes(3);

expect(infoSpy).toHaveBeenNthCalledWith(1, `Asking question: @UbiquityOS ${TEST_QUESTION}`);

const prompt = `=== Current Issue #1 Specification === ubiquity/test-repo/1 ===
Expand Down Expand Up @@ -395,7 +393,8 @@ function createContext(body = TEST_SLASH_COMMAND) {
completions: {
createCompletion: async (): Promise<CompletionsType> => {
return {
answer: "This is a mock answer for the chat",
answer: MOCK_ANSWER,
groundTruths: [MOCK_ANSWER],
tokenUsage: {
input: 1000,
output: 150,
Expand All @@ -404,7 +403,7 @@ function createContext(body = TEST_SLASH_COMMAND) {
};
},
createGroundTruthCompletion: async (): Promise<string> => {
return '["This is a mock answer for the chat"]';
return `["${MOCK_ANSWER}"]`;
},
},
},
Expand Down

0 comments on commit 29177f5

Please sign in to comment.