Skip to content

Commit

Permalink
Merge pull request #78 from ljoukov/functions
Browse files Browse the repository at this point in the history
Add support for "functions" in OpenAI chat completion APIs.
  • Loading branch information
Krivoblotsky authored Jun 20, 2023
2 parents 0641b9c + 2620356 commit b116159
Show file tree
Hide file tree
Showing 7 changed files with 396 additions and 12 deletions.
4 changes: 3 additions & 1 deletion Demo/App/APIProvidedView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,9 @@ struct APIProvidedView: View {
miscStore: miscStore
)
.onChange(of: apiKey) { newApiKey in
chatStore.openAIClient = OpenAI(apiToken: newApiKey)
let client = OpenAI(apiToken: newApiKey)
chatStore.openAIClient = client
miscStore.openAIClient = client
}
}
}
35 changes: 33 additions & 2 deletions Demo/DemoChat/Sources/ChatStore.swift
Original file line number Diff line number Diff line change
Expand Up @@ -85,22 +85,53 @@ public final class ChatStore: ObservableObject {
return
}

let weatherFunction = ChatFunctionDeclaration(
name: "getWeatherData",
description: "Get the current weather in a given location",
parameters: .init(
type: .object,
properties: [
"location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA")
],
required: ["location"]
)
)

let functions = [weatherFunction]

let chatsStream: AsyncThrowingStream<ChatStreamResult, Error> = openAIClient.chatsStream(
query: ChatQuery(
model: model,
messages: conversation.messages.map { message in
Chat(role: message.role, content: message.content)
}
},
functions: functions
)
)

var functionCallName = ""
var functionCallArguments = ""
for try await partialChatResult in chatsStream {
for choice in partialChatResult.choices {
let existingMessages = conversations[conversationIndex].messages
// Function calls are also streamed, so we need to accumulate.
if let functionCallDelta = choice.delta.functionCall {
if let nameDelta = functionCallDelta.name {
functionCallName += nameDelta
}
if let argumentsDelta = functionCallDelta.arguments {
functionCallArguments += argumentsDelta
}
}
var messageText = choice.delta.content ?? ""
if let finishReason = choice.finishReason,
finishReason == "function_call" {
messageText += "Function call: name=\(functionCallName) arguments=\(functionCallArguments)"
}
let message = Message(
id: partialChatResult.id,
role: choice.delta.role ?? .assistant,
content: choice.delta.content ?? "",
content: messageText,
createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created))
)
if let existingMessageIndex = existingMessages.firstIndex(where: { $0.id == partialChatResult.id }) {
Expand Down
21 changes: 18 additions & 3 deletions Demo/DemoChat/Sources/UI/DetailView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ struct DetailView: View {
@State var inputText: String = ""
@FocusState private var isFocused: Bool
@State private var showsModelSelectionSheet = false
@State private var selectedChatModel: Model = .gpt3_5Turbo
@State private var selectedChatModel: Model = .gpt4_0613

private let availableChatModels: [Model] = [.gpt3_5Turbo, .gpt4]
private let availableChatModels: [Model] = [.gpt3_5Turbo0613, .gpt4_0613]

let conversation: Conversation
let error: Error?
Expand Down Expand Up @@ -237,6 +237,14 @@ struct ChatBubble: View {
.foregroundColor(userForegroundColor)
.background(userBackgroundColor)
.clipShape(RoundedRectangle(cornerRadius: 16, style: .continuous))
case .function:
Text(message.content)
.font(.footnote.monospaced())
.padding(.horizontal, 16)
.padding(.vertical, 12)
.background(assistantBackgroundColor)
.clipShape(RoundedRectangle(cornerRadius: 16, style: .continuous))
Spacer(minLength: 24)
case .system:
EmptyView()
}
Expand All @@ -252,7 +260,14 @@ struct DetailView_Previews: PreviewProvider {
messages: [
Message(id: "1", role: .assistant, content: "Hello, how can I help you today?", createdAt: Date(timeIntervalSinceReferenceDate: 0)),
Message(id: "2", role: .user, content: "I need help with my subscription.", createdAt: Date(timeIntervalSinceReferenceDate: 100)),
Message(id: "3", role: .assistant, content: "Sure, what seems to be the problem with your subscription?", createdAt: Date(timeIntervalSinceReferenceDate: 200))
Message(id: "3", role: .assistant, content: "Sure, what seems to be the problem with your subscription?", createdAt: Date(timeIntervalSinceReferenceDate: 200)),
Message(id: "4", role: .function, content:
"""
get_current_weather({
"location": "Glasgow, Scotland",
"format": "celsius"
})
""", createdAt: Date(timeIntervalSinceReferenceDate: 200))
]
),
error: nil,
Expand Down
57 changes: 57 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,8 @@ Using the OpenAI Chat API, you can build your own applications with `gpt-3.5-tur
public let model: Model
/// The messages to generate chat completions for
public let messages: [Chat]
/// A list of functions the model may generate JSON inputs for.
public let functions: [ChatFunctionDeclaration]?
/// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or top_p but not both.
public let temperature: Double?
/// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
Expand Down Expand Up @@ -318,6 +320,61 @@ for try await result in openAI.chatsStream(query: query) {
}
```

**Function calls**
```swift
let openAI = OpenAI(apiToken: "...")
// Declare functions which the model might decide to call.
let functions = [
ChatFunctionDeclaration(
name: "get_current_weather",
description: "Get the current weather in a given location",
parameters:
JSONSchema(
type: .object,
properties: [
"location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA"),
"unit": .init(type: .string, enumValues: ["celsius", "fahrenheit"])
],
required: ["location"]
)
)
]
let query = ChatQuery(
model: "gpt-3.5-turbo-0613", // 0613 is the earliest version with function calls support.
messages: [
Chat(role: .user, content: "What's the weather like in Boston?")
],
functions: functions
)
let result = try await openAI.chats(query: query)
```

Result will be (serialized as JSON here for readability):
```json
{
"id": "chatcmpl-1234",
"object": "chat.completion",
"created": 1686000000,
"model": "gpt-3.5-turbo-0613",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"function_call": {
"name": "get_current_weather",
"arguments": "{\n \"location\": \"Boston, MA\"\n}"
}
},
"finish_reason": "function_call"
}
],
"usage": { "total_tokens": 100, "completion_tokens": 18, "prompt_tokens": 82 }
}

```


Review [Chat Documentation](https://platform.openai.com/docs/guides/chat) for more info.

### Images
Expand Down
Loading

0 comments on commit b116159

Please sign in to comment.