Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
# Changelog

## 0.14.0

- Update `openai` version
- Fix tool calls issues for Vercel AI SDK integration
- Remove `useChatStream` (moved to separate repo)

## 0.13.9

- Fix for browser runtime
Expand Down
74 changes: 1 addition & 73 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ const rawCompletion = await lt.chat.completions.create({
prompt: "<prompt-slug>",
doNotRecord: false, // set to true to ensure logs do not contain any info about payloads. You would still see the request in the logs, but not the variables etc.
metadata: {
"custom-field": 1,
"custom-field": "1",
},
})
```
Expand Down Expand Up @@ -319,75 +319,3 @@ runner.on("chunk", (chunk: ChatCompletionChunk) => {
// NOTE: chunk here is always a proper JSON even with parts of the message
})
```

## useChatStream React hook

You can leverage our React hook to handle AI streams more easily: `useChatStream`, which can be imported from `langtail/react/useChatStream`.

Here's an example:

```ts
// NOTE: your FE code
import { useChatStream } from "langtail/react/useChatStream";

function YourComponent() {
const { isLoading, messages, send } = useChatStream({
fetcher: (message) =>
fetch(`/api/langtail`, {
method: "POST",
body: JSON.stringify({ messages: [message] }),
headers: {
"Content-Type": "application/json",
},
}).then((res) => res.body),
onToolCall: async (toolCall: ChatCompletionMessageToolCall, fullMessage) => {
if (toolCall.function.name === "weather") {
return "Sunny 22 degrees"
}

return "Unknown data"
}
});

useEffect(() => {
// Call send wherever you like with any content
    send({ role: 'user', content: "Can you hear me?" })
}, [])

// NOTE: the `messages` array is updated within the react providing you with live stream of the messages
return (
<>
{messages.map((message) => (
<p>
{message.role}: {message.content}
</p>
))}
</>
)
}
```

```ts
// NOTE: your next.js BE code, assuming that this is the route /api/langtail
import { Langtail } from "langtail"
import { NextRequest } from "next/server"

export const runtime = "edge"

export const lt = new Langtail({
apiKey: process.env.LANGTAIL_API_KEY ?? "",
})

// POST handler: invoke the "weather" prompt with the incoming messages and stream the result back
export async function POST(request: NextRequest) {
const messages = (await request.json()).messages

const result = await lt.prompts.invoke({
prompt: "weather",
messages,
stream: true,
})

return new Response(result.toReadableStream())
}
```
24 changes: 5 additions & 19 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "langtail",
"version": "0.13.9",
"version": "0.14.0",
"description": "",
"main": "./Langtail.js",
"packageManager": "pnpm@8.15.6",
Expand Down Expand Up @@ -35,17 +35,12 @@
],
"license": "MIT",
"devDependencies": {
"@testing-library/dom": "^10.0.0",
"@testing-library/react": "^16.0.0",
"@types/node": "^20.12.11",
"@types/react": "^18.3.3",
"copyfiles": "^2.4.1",
"fs-extra": "^11.2.0",
"jsdom": "^24.1.0",
"nock": "14.0.0-beta.5",
"prettier": "^3.2.5",
"react": "18.2.0",
"react-dom": "^18.0.0",
"tsup": "^8.0.2",
"typescript": "^5.4.5",
"vitest": "^1.6.0"
Expand Down Expand Up @@ -83,11 +78,6 @@
"import": "./stream/index.mjs",
"types": "./stream/index.d.ts"
},
"./react/useChatStream": {
"require": "./react/useChatStream.js",
"import": "./react/useChatStream.mjs",
"types": "./react/useChatStream.d.ts"
},
"./vercel-ai": {
"require": "./vercel-ai/index.js",
"import": "./vercel-ai/index.mjs",
Expand All @@ -99,18 +89,15 @@
"types": "./customTypes.d.ts"
}
},
"peerDependencies": {
"react": ">=18.2.0"
},
"dependencies": {
"@ai-sdk/provider": "^0.0.24",
"@ai-sdk/provider-utils": "^1.0.20",
"@ai-sdk/provider": "^1.0.1",
"@ai-sdk/provider-utils": "^2.0.3",
"@langtail/handlebars-evalless": "^0.1.2",
"commander": "^12.1.0",
"date-fns": "^3.6.0",
"dotenv-flow": "^4.1.0",
"json-schema-to-zod": "^2.1.0",
"openai": "^4.43.0",
"openai": "^4.76.0",
"query-string": "^7.1.3",
"zod": "^3.23.8"
},
Expand All @@ -131,8 +118,7 @@
"src/vercel-ai/index.ts",
"src/bin/entry.ts",
"src/schemas.ts",
"src/stream/index.ts",
"src/react/useChatStream.ts"
"src/stream/index.ts"
],
"external": [
"dotenv-flow",
Expand Down
Loading