Skip to content

Commit 79395f6

Browse files
committed
Add latest GPT-5 model and raw string model support
1 parent 36c3d41 commit 79395f6

File tree

4 files changed

+193
-70
lines changed

4 files changed

+193
-70
lines changed

Package.resolved

Lines changed: 22 additions & 22 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Package.swift

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,11 +16,11 @@ let package = Package(
1616
],
1717
dependencies: [
1818
.package(url: "https://github.com/alfianlosari/GPTEncoder.git", exact: "1.0.4"),
19-
.package(url: "https://github.com/apple/swift-openapi-generator", from: "1.2.0"),
20-
.package(url: "https://github.com/apple/swift-openapi-runtime", from: "1.4.0"),
21-
.package(url: "https://github.com/apple/swift-openapi-urlsession", from: "1.0.0"),
22-
.package(url: "https://github.com/swift-server/swift-openapi-async-http-client", from: "1.0.0"),
23-
.package(url: "https://github.com/apple/swift-http-types", from: "1.2.0"),
19+
.package(url: "https://github.com/apple/swift-openapi-generator", from: "1.10.2"),
20+
.package(url: "https://github.com/apple/swift-openapi-runtime", from: "1.8.2"),
21+
.package(url: "https://github.com/apple/swift-openapi-urlsession", from: "1.1.0"),
22+
.package(url: "https://github.com/swift-server/swift-openapi-async-http-client", from: "1.1.0"),
23+
.package(url: "https://github.com/apple/swift-http-types", from: "1.4.0"),
2424
],
2525
targets: [
2626
.target(

Sources/ChatGPTSwift/ChatGPTAPI.swift

Lines changed: 141 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ public typealias ChatCompletionTool = Components.Schemas.ChatCompletionTool
1919
public typealias ChatCompletionResponseMessage = Components.Schemas.ChatCompletionResponseMessage
2020
public typealias ChatGPTModel = Components.Schemas.CreateChatCompletionRequest.modelPayload
2121
.Value2Payload
22+
typealias ModelPayload = Components.Schemas.CreateChatCompletionRequest.modelPayload
2223

2324
public class ChatGPTAPI: @unchecked Sendable {
2425

@@ -109,10 +110,56 @@ public class ChatGPTAPI: @unchecked Sendable {
109110
self.historyList.append(Message(role: "user", content: userText))
110111
self.historyList.append(Message(role: "assistant", content: responseText))
111112
}
112-
113+
114+
public func sendMessageStream(
115+
text: String,
116+
model: String,
117+
systemText: String = ChatGPTAPI.Constants.defaultSystemText,
118+
temperature: Double = ChatGPTAPI.Constants.defaultTemperature,
119+
maxTokens: Int? = nil,
120+
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
121+
nil,
122+
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload? = nil,
123+
imageData: Data? = nil
124+
) async throws -> AsyncMapSequence<
125+
AsyncThrowingPrefixWhileSequence<
126+
AsyncThrowingMapSequence<
127+
ServerSentEventsDeserializationSequence<
128+
ServerSentEventsLineDeserializationSequence<HTTPBody>
129+
>,
130+
ServerSentEventWithJSONData<Components.Schemas.CreateChatCompletionStreamResponse>
131+
>
132+
>, String
133+
> {
134+
try await sendMessageStreamInternal(text: text, model: .init(value1: model, value2: nil), systemText: systemText, temperature: temperature, maxTokens: maxTokens, responseFormat: responseFormat, stop: stop, imageData: imageData)
135+
}
136+
113137
public func sendMessageStream(
114138
text: String,
115-
model: ChatGPTModel = .gpt_hyphen_4o,
139+
model: ChatGPTModel = .gpt_hyphen_4_period_1,
140+
systemText: String = ChatGPTAPI.Constants.defaultSystemText,
141+
temperature: Double = ChatGPTAPI.Constants.defaultTemperature,
142+
maxTokens: Int? = nil,
143+
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
144+
nil,
145+
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload? = nil,
146+
imageData: Data? = nil
147+
) async throws -> AsyncMapSequence<
148+
AsyncThrowingPrefixWhileSequence<
149+
AsyncThrowingMapSequence<
150+
ServerSentEventsDeserializationSequence<
151+
ServerSentEventsLineDeserializationSequence<HTTPBody>
152+
>,
153+
ServerSentEventWithJSONData<Components.Schemas.CreateChatCompletionStreamResponse>
154+
>
155+
>, String
156+
> {
157+
try await sendMessageStreamInternal(text: text, model: .init(value1: nil, value2: model), systemText: systemText, temperature: temperature, maxTokens: maxTokens, responseFormat: responseFormat, stop: stop, imageData: imageData)
158+
}
159+
160+
private func sendMessageStreamInternal(
161+
text: String,
162+
model: ModelPayload,
116163
systemText: String = ChatGPTAPI.Constants.defaultSystemText,
117164
temperature: Double = ChatGPTAPI.Constants.defaultTemperature,
118165
maxTokens: Int? = nil,
@@ -141,7 +188,7 @@ public class ChatGPTAPI: @unchecked Sendable {
141188
body: .json(
142189
.init(
143190
messages: messages,
144-
model: .init(value1: nil, value2: model),
191+
model: model,
145192
max_tokens: maxTokens,
146193
response_format: responseFormat,
147194
stop: stop,
@@ -173,13 +220,42 @@ public class ChatGPTAPI: @unchecked Sendable {
173220
} else {
174221
statusCode = 500
175222
}
176-
throw getError(statusCode: statusCode, model: model.rawValue, payload: nil)
223+
throw getError(statusCode: statusCode, model: model.value1 ?? model.value2?.rawValue, payload: nil)
177224
}
178225
}
179-
226+
227+
228+
public func sendMessage(
229+
text: String,
230+
model: String,
231+
systemText: String = ChatGPTAPI.Constants.defaultSystemText,
232+
temperature: Double = ChatGPTAPI.Constants.defaultTemperature,
233+
maxTokens: Int? = nil,
234+
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
235+
nil,
236+
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload? = nil,
237+
imageData: Data? = nil
238+
) async throws -> String {
239+
try await sendMessageInternal(text: text, model: .init(value1: model, value2: nil), systemText: systemText, temperature: temperature, maxTokens: maxTokens, responseFormat: responseFormat, stop: stop, imageData: imageData)
240+
}
241+
180242
public func sendMessage(
181243
text: String,
182-
model: ChatGPTModel = .gpt_hyphen_4o,
244+
model: ChatGPTModel = .gpt_hyphen_4_period_1,
245+
systemText: String = ChatGPTAPI.Constants.defaultSystemText,
246+
temperature: Double = ChatGPTAPI.Constants.defaultTemperature,
247+
maxTokens: Int? = nil,
248+
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
249+
nil,
250+
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload? = nil,
251+
imageData: Data? = nil
252+
) async throws -> String {
253+
try await sendMessageInternal(text: text, model: .init(value1: nil, value2: model), systemText: systemText, temperature: temperature, maxTokens: maxTokens, responseFormat: responseFormat, stop: stop, imageData: imageData)
254+
}
255+
256+
private func sendMessageInternal(
257+
text: String,
258+
model: ModelPayload,
183259
systemText: String = ChatGPTAPI.Constants.defaultSystemText,
184260
temperature: Double = ChatGPTAPI.Constants.defaultTemperature,
185261
maxTokens: Int? = nil,
@@ -197,7 +273,7 @@ public class ChatGPTAPI: @unchecked Sendable {
197273
body: .json(
198274
.init(
199275
messages: messages,
200-
model: .init(value1: nil, value2: model),
276+
model: model,
201277
max_tokens: maxTokens,
202278
response_format: responseFormat,
203279
stop: stop
@@ -212,15 +288,44 @@ public class ChatGPTAPI: @unchecked Sendable {
212288
self.appendToHistoryList(userText: text, responseText: content)
213289
return content
214290
case .undocumented(let statusCode, let payload):
215-
throw getError(statusCode: statusCode, model: model.rawValue, payload: payload)
291+
throw getError(statusCode: statusCode, model: model.value1 ?? model.value2?.rawValue, payload: payload)
216292
}
217293
}
218-
294+
295+
public func callFunction(
296+
prompt: String,
297+
tools: [ChatCompletionTool],
298+
model: ChatGPTModel = .gpt_hyphen_4_period_1,
299+
maxTokens: Int? = nil,
300+
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
301+
nil,
302+
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload? = nil,
303+
systemText: String =
304+
"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.",
305+
imageData: Data? = nil
306+
) async throws -> ChatCompletionResponseMessage {
307+
try await callFunctionInternal(prompt: prompt, tools: tools, model: .init(value1: nil, value2: model), maxTokens: maxTokens, responseFormat: responseFormat, stop: stop, systemText: systemText, imageData: imageData)
308+
}
309+
219310
public func callFunction(
220311
prompt: String,
221312
tools: [ChatCompletionTool],
222-
model: Components.Schemas.CreateChatCompletionRequest.modelPayload.Value2Payload =
223-
.gpt_hyphen_4,
313+
model: String,
314+
maxTokens: Int? = nil,
315+
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
316+
nil,
317+
stop: Components.Schemas.CreateChatCompletionRequest.stopPayload? = nil,
318+
systemText: String =
319+
"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.",
320+
imageData: Data? = nil
321+
) async throws -> ChatCompletionResponseMessage {
322+
try await callFunctionInternal(prompt: prompt, tools: tools, model: .init(value1: model, value2: nil), maxTokens: maxTokens, responseFormat: responseFormat, stop: stop, systemText: systemText, imageData: imageData)
323+
}
324+
325+
private func callFunctionInternal(
326+
prompt: String,
327+
tools: [ChatCompletionTool],
328+
model: ModelPayload,
224329
maxTokens: Int? = nil,
225330
responseFormat: Components.Schemas.CreateChatCompletionRequest.response_formatPayload? =
226331
nil,
@@ -239,7 +344,7 @@ public class ChatGPTAPI: @unchecked Sendable {
239344
body: .json(
240345
.init(
241346
messages: messages,
242-
model: .init(value1: nil, value2: model),
347+
model: model,
243348
max_tokens: maxTokens,
244349
response_format: responseFormat,
245350
stop: stop,
@@ -254,20 +359,40 @@ public class ChatGPTAPI: @unchecked Sendable {
254359
}
255360
return message
256361
case .undocumented(let statusCode, let payload):
257-
throw getError(statusCode: statusCode, model: model.rawValue, payload: payload)
362+
throw getError(statusCode: statusCode, model: model.value1 ?? model.value2?.rawValue, payload: payload)
258363
}
259364
}
260-
365+
366+
367+
public func generateSpeechFrom(
368+
input: String,
369+
model: String,
370+
voice: Components.Schemas.CreateSpeechRequest.voicePayload = .alloy,
371+
format: Components.Schemas.CreateSpeechRequest.response_formatPayload = .aac
372+
) async throws -> Data {
373+
try await generateSpeechInternalFrom(input: input, model: .init(value1: model, value2: nil), voice: voice, format: format)
374+
}
375+
376+
261377
public func generateSpeechFrom(
262378
input: String,
263379
model: Components.Schemas.CreateSpeechRequest.modelPayload.Value2Payload = .tts_hyphen_1,
264380
voice: Components.Schemas.CreateSpeechRequest.voicePayload = .alloy,
265381
format: Components.Schemas.CreateSpeechRequest.response_formatPayload = .aac
382+
) async throws -> Data {
383+
try await generateSpeechInternalFrom(input: input, model: .init(value1: nil, value2: model), voice: voice, format: format)
384+
}
385+
386+
private func generateSpeechInternalFrom(
387+
input: String,
388+
model: Components.Schemas.CreateSpeechRequest.modelPayload,
389+
voice: Components.Schemas.CreateSpeechRequest.voicePayload = .alloy,
390+
format: Components.Schemas.CreateSpeechRequest.response_formatPayload = .aac
266391
) async throws -> Data {
267392
let response = try await client.createSpeech(
268393
body: .json(
269394
.init(
270-
model: .init(value1: nil, value2: model),
395+
model: model,
271396
input: input,
272397
voice: voice,
273398
response_format: format
@@ -285,7 +410,7 @@ public class ChatGPTAPI: @unchecked Sendable {
285410
}
286411

287412
case .undocumented(let statusCode, let payload):
288-
throw getError(statusCode: statusCode, model: model.rawValue, payload: payload)
413+
throw getError(statusCode: statusCode, model: model.value1 ?? model.value2?.rawValue, payload: payload)
289414
}
290415
}
291416

0 commit comments

Comments (0)