Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions src/LangtailPrompts.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,42 @@ describe.skipIf(!liveTesting)(

describe("LangtailPrompts", () => {
describe("invoke with optional callbacks", () => {

it("should pass parallel_tool_calls param to fetch", async () => {
  // Stub fetch that resolves with a minimal successful chat-completion payload.
  const fetchMock = vi.fn().mockResolvedValue({
    ok: true,
    status: 200,
    json: async () => ({
      choices: [{ message: { content: "Test response" } }],
    }),
    headers: new Headers({
      'Content-Type': 'application/json',
      'X-API-Key': 'test-api-key',
      'x-langtail-thread-id': 'test-thread-id'
    }),
  });

  const prompts = new LangtailPrompts({
    apiKey: "test-api-key",
    fetch: fetchMock,
  });

  await prompts.invoke({
    prompt: "test-prompt",
    environment: "production",
    parallelToolCalls: true,
  });

  // The option must be serialized (camelCase, per langtailBodySchema)
  // into the JSON body of the outgoing request.
  expect(fetchMock).toHaveBeenCalled();
  const requestBody = JSON.parse(fetchMock.mock.calls[0][1].body);
  expect(requestBody).toHaveProperty('parallelToolCalls', true);
});
it("should trigger onRawResponse callback when response is returned", async () => {
const mockFetch = vi.fn().mockResolvedValue({
ok: true,
Expand Down
1 change: 1 addition & 0 deletions src/LangtailPrompts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ export type IRequestParams<P extends PromptSlug, E extends Environment<P> = unde

export type IRequestParamsStream<P extends PromptSlug, E extends Environment<P> = undefined, V extends Version<P, E> = undefined, S extends boolean | undefined = false> = IRequestParams<P, E, V> & {
stream?: S
parallelToolCalls?: boolean
}

export type IInvokeOptionalCallbacks = {
Expand Down
33 changes: 33 additions & 0 deletions src/getOpenAIBody.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,39 @@ describe("getOpenAIBody", () => {
`)
})


it("should add parallel_tool_calls param when it is set in parsedBody", () => {
const completionConfig = {
state: {
type: "chat" as const,
args: {
model: "gpt-3.5-turbo",
max_tokens: 100,
temperature: 0.8,
top_p: 1,
presence_penalty: 0,
frequency_penalty: 0,
jsonmode: false,
seed: 123,
stop: [],
},
template: [
{
role: "system" as const,
content: "tell me a story",
},
],
},
chatInput: {},
}

const openAIbody = getOpenAIBody(completionConfig, {
parallelToolCalls: true,
})

expect(openAIbody).toHaveProperty('parallel_tool_calls', true)
})

it("should override parameters from the playground with the ones in parsedBody", () => {
const completionConfig = {
state: {
Expand Down
3 changes: 3 additions & 0 deletions src/getOpenAIBody.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,9 @@ export function getOpenAIBody(
temperature: parsedBody.temperature ?? completionArgs.temperature,
messages: inputMessages,
top_p: parsedBody.top_p ?? completionArgs.top_p,
...(parsedBody.parallelToolCalls !== undefined
? { parallel_tool_calls: parsedBody.parallelToolCalls }
: {}),
presence_penalty:
parsedBody.presence_penalty ?? completionArgs.presence_penalty,
frequency_penalty:
Expand Down
2 changes: 2 additions & 0 deletions src/schemas.ts
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,7 @@ export const bodyMetadataSchema = z

export const langtailBodySchema = z.object({
doNotRecord: z.boolean().optional(),
parallelToolCalls: z.boolean().optional(),
metadata: bodyMetadataSchema,
_langtailTestRunId: z.string().optional(),
_langtailTestInputId: z.string().optional(),
Expand All @@ -226,6 +227,7 @@ export const openAIBodySchemaObjectDefinition = {
max_tokens: z.number().optional(),
temperature: z.number().optional(),
top_p: z.number().optional(),
parallel_tool_calls: z.boolean().optional(),
presence_penalty: z.number().optional(),
frequency_penalty: z.number().optional(),
model: z.string().optional(),
Expand Down