Merge pull request #114 from samchon/feat/empty-parameters
Test empty parameters' function calling cases
samchon authored Dec 17, 2024
2 parents 955f41a + 783a1af commit 1ae8dd5
Showing 6 changed files with 196 additions and 8 deletions.
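The new tests all declare a tool that takes no parameters and assert that each vendor still produces a well-formed call with empty arguments. The tool shape they exercise looks roughly like the sketch below (TypeScript against the OpenAI SDK types; illustrative only, not code from this commit). The ChatGPT and Gemini tests omit the parameters schema entirely, while the Claude and Llama tests pass an explicit empty object schema.

import OpenAI from "openai";

// A zero-parameter tool: "parameters" is omitted, so the model is expected to
// reply with a tool call whose arguments are the empty JSON object "{}".
const printTool: OpenAI.ChatCompletionTool = {
  type: "function",
  function: {
    name: "print",
    description: "Print to the screen.",
  },
};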
13 changes: 6 additions & 7 deletions examples/function-calling/schemas/llama.sale.schema.json
@@ -327,16 +327,16 @@
},
"minItems": 1
},
"required": {
"title": "Whether the unit is required or not",
"description": "Whether the unit is required or not.\n\nWhen the unit is required, the customer must select the unit. If do not\nselect, customer can't buy it.\n\nFor example, if there's a sale \"Macbook Set\" and one of the unit is the\n\"Main Body\", is it possible to buy the \"Macbook Set\" without the\n\"Main Body\" unit? This property is for that case.",
"type": "boolean"
},
"name": {
"title": "Representative name of the unit",
"description": "Representative name of the unit.",
"type": "string"
},
"required": {
"title": "Whether the unit is required or not",
"description": "Whether the unit is required or not.\n\nWhen the unit is required, the customer must select the unit. If do not\nselect, customer can't buy it.\n\nFor example, if there's a sale \"Macbook Set\" and one of the unit is the\n\"Main Body\", is it possible to buy the \"Macbook Set\" without the\n\"Main Body\" unit? This property is for that case.",
"type": "boolean"
},
"primary": {
"title": "Whether the unit is primary or not",
"description": "Whether the unit is primary or not.\n\nJust a labeling value.",
@@ -346,8 +346,8 @@
"required": [
"options",
"stocks",
"required",
"name",
"required",
"primary"
]
},
@@ -364,7 +364,6 @@
},
"required": [
"section_code",
"status",
"opened_at",
"closed_at",
"content",
39 changes: 39 additions & 0 deletions test/features/llm/chatgpt/test_chatgpt_function_calling_empty.ts
@@ -0,0 +1,39 @@
import { TestValidator } from "@nestia/e2e";
import OpenAI from "openai";

import { TestGlobal } from "../../../TestGlobal";

export const test_chatgpt_function_calling_empty = async (): Promise<void> => {
if (TestGlobal.env.CHATGPT_API_KEY === undefined) return;

const client: OpenAI = new OpenAI({ apiKey: TestGlobal.env.CHATGPT_API_KEY });
const completion: OpenAI.ChatCompletion =
await client.chat.completions.create({
model: "gpt-4o",
messages: [
{
role: "system",
content:
"You are a helpful customer support assistant. Use the supplied tools to assist the user.",
},
{
role: "user",
content: "Call a print function please.",
},
],
tools: [
{
type: "function",
function: {
name: "print",
description: "Print to the screen.",
},
},
],
tool_choice: "required",
parallel_tool_calls: false,
});
const call = completion.choices[0].message.tool_calls?.[0];
TestValidator.equals("name")(call?.function.name)("print");
TestValidator.equals("arguments")(call?.function.arguments)("{}");
};
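The Chat Completions API returns tool-call arguments as a raw JSON string, which is why the test above compares against the literal "{}" rather than an object. If the arguments were needed as a parsed value, a step along these lines would do (a sketch, not part of the commit):

const args: Record<string, unknown> = JSON.parse(
  call?.function.arguments ?? "{}",
);
TestValidator.equals("arguments")(args)({});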
45 changes: 45 additions & 0 deletions test/features/llm/claude/test_claude_function_calling_empty.ts
@@ -0,0 +1,45 @@
import Anthropic from "@anthropic-ai/sdk";
import { TestValidator } from "@nestia/e2e";

import { TestGlobal } from "../../../TestGlobal";

export const test_claude_function_calling_empty = async (): Promise<void> => {
if (TestGlobal.env.CLAUDE_API_KEY === undefined) return;

const client: Anthropic = new Anthropic({
apiKey: TestGlobal.env.CLAUDE_API_KEY,
});
const completion: Anthropic.Message = await client.messages.create({
model: "claude-3-5-sonnet-latest",
max_tokens: 8_192,
messages: [
{
role: "assistant",
content:
"You are a helpful customer support assistant. Use the supplied tools to assist the user.",
},
{
role: "user",
content: "Call a print function please.",
},
],
tools: [
{
name: "print",
description: "Print to the screen.",
input_schema: {
type: "object",
properties: {},
required: [],
},
},
],
tool_choice: {
type: "any",
disable_parallel_tool_use: true,
},
});
const call = completion.content.filter((c) => c.type === "tool_use")?.[0];
TestValidator.equals("name")(call?.name)("print");
TestValidator.equals("arguments")(call?.input)({});
};
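Anthropic, by contrast, returns the tool input already parsed: the tool_use content block's input is an object, so the test compares it against {} instead of the string "{}". Narrowing the block type explicitly would look roughly like this (a sketch assuming the SDK's Anthropic.ToolUseBlock type, not commit code):

const block: Anthropic.ToolUseBlock | undefined = completion.content.find(
  (c): c is Anthropic.ToolUseBlock => c.type === "tool_use",
);
TestValidator.equals("arguments")(block?.input)({});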
58 changes: 58 additions & 0 deletions test/features/llm/gemini/test_gemini_function_calling_empty.ts
@@ -0,0 +1,58 @@
import {
FunctionCallingMode,
GenerateContentResult,
GenerativeModel,
GoogleGenerativeAI,
} from "@google/generative-ai";
import { TestValidator } from "@nestia/e2e";

import { TestGlobal } from "../../../TestGlobal";

export const test_gemini_function_calling_empty = async (): Promise<void> => {
if (TestGlobal.env.GEMINI_API_KEY === undefined) return;

const model: GenerativeModel = new GoogleGenerativeAI(
TestGlobal.env.GEMINI_API_KEY,
).getGenerativeModel({
model: "gemini-1.5-pro",
});
const completion: GenerateContentResult = await model.generateContent({
contents: [
{
role: "model",
parts: [
{
text: "You are a helpful customer support assistant. Use the supplied tools to assist the user.",
},
],
},
{
role: "user",
parts: [
{
text: "Call a print function please.",
},
],
},
],
tools: [
{
functionDeclarations: [
{
name: "print",
description: "Print to the screen.",
},
],
},
],
toolConfig: {
functionCallingConfig: {
mode: FunctionCallingMode.ANY,
allowedFunctionNames: ["print"],
},
},
});
const call = (completion.response.functionCalls() ?? [])?.[0];
TestValidator.equals("name")(call?.name)("print");
TestValidator.equals("arguments")(call?.args)({});
};
47 changes: 47 additions & 0 deletions test/features/llm/llama/test_llama_function_calling_empty.ts
@@ -0,0 +1,47 @@
import { TestValidator } from "@nestia/e2e";
import OpenAI from "openai";

import { TestGlobal } from "../../../TestGlobal";

export const test_llama_function_calling_empty = async (): Promise<void> => {
if (TestGlobal.env.LLAMA_API_KEY === undefined) return;

const client: OpenAI = new OpenAI({
apiKey: TestGlobal.env.LLAMA_API_KEY,
baseURL: "https://api.llama-api.com",
});
const completion: OpenAI.ChatCompletion =
await client.chat.completions.create({
model: "llama3.3-70b",
messages: [
{
role: "system",
content:
"You are a helpful customer support assistant. Use the supplied tools to assist the user.",
},
{
role: "user",
content: "Call a print function please.",
},
],
tools: [
{
type: "function",
function: {
name: "print",
description: "Print to the screen.",
parameters: {
type: "object",
properties: {},
required: [],
},
},
},
],
tool_choice: "required",
parallel_tool_calls: false,
});
const call = completion.choices[0].message.tool_calls?.[0];
TestValidator.equals("name")(call?.function.name)("print");
TestValidator.equals("arguments")(call?.function.arguments)("{}");
};
2 changes: 1 addition & 1 deletion test/utils/LlamaFunctionCaller.ts
@@ -72,7 +72,7 @@ export namespace LlamaFunctionCaller {
});
const completion: OpenAI.ChatCompletion =
await client.chat.completions.create({
model: "llama3.2-90b-vision",
model: "llama3.3-70b",
messages: previous
? [
...props.texts.slice(0, -1),
