Deprecations + cosmetic fix
Ihor Makhnyk committed Nov 17, 2023
1 parent 89edb74 commit 4a258f4
Showing 13 changed files with 51 additions and 42 deletions.
20 changes: 10 additions & 10 deletions Sources/OpenAI/Public/Models/Chat/ChatContent.swift
@@ -1,5 +1,5 @@
//
// File.swift
// ChatContent.swift
//
//
// Created by Federico Vitale on 14/11/23.
@@ -11,24 +11,24 @@ public struct ChatContent: Codable, Equatable {
let type: ChatContentType
let value: String

enum CodingKeys: CodingKey {
case type
case value
}

public enum ChatContentType: String, Codable {
case text
case imageUrl = "image_url"
}

public struct ImageUrl: Codable, Equatable {
public struct ChatImageUrl: Codable, Equatable {
let url: String

enum CodingKeys: CodingKey {
case url
}
}

enum CodingKeys: CodingKey {
case type
case value
}

public static func text(_ text: String) -> Self {
Self.init(text)
}
@@ -47,7 +47,7 @@ public struct ChatContent: Codable, Equatable {
self.value = text
}

// we need to perform a custom encoding since the `value` key is variable based on the `type`
// Custom encoding since the `value` key is variable based on the `type`
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: ChatContent.CodingKeys.self)
var dynamicContainer = encoder.container(keyedBy: DynamicKey.self)
@@ -59,7 +59,7 @@ public struct ChatContent: Codable, Equatable {
try dynamicContainer.encode(value, forKey: .init(stringValue: "text"))
break
case .imageUrl:
var nested = dynamicContainer.nestedContainer(keyedBy: ImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url"))
var nested = dynamicContainer.nestedContainer(keyedBy: ChatImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url"))
try nested.encode(value, forKey: .url)
break
}
@@ -76,7 +76,7 @@ public struct ChatContent: Codable, Equatable {
self.value = try dynamicContainer.decode(String.self, forKey: .init(stringValue: "text"))
break
case .imageUrl:
let nested = try dynamicContainer.nestedContainer(keyedBy: ImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url"))
let nested = try dynamicContainer.nestedContainer(keyedBy: ChatImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url"))
self.value = try nested.decode(String.self, forKey: .url)
break
}
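A quick illustration of the dynamic-key encoding above, as a minimal sketch that is not part of this commit (the exact JSON key order is not guaranteed; the OpenAI module name and the image_url shape in the trailing comment are inferred from the encoder logic):

import Foundation
import OpenAI

// Sketch only: encode a text content and print the resulting JSON.
// Per encode(to:), `value` is written under a dynamic key that depends on
// `type`: "text" here, or a nested "image_url" object with a "url" field.
let content = ChatContent.text("Hello, world!")

do {
    let data = try JSONEncoder().encode(content)
    print(String(data: data, encoding: .utf8) ?? "")
    // Expected shape: {"type":"text","text":"Hello, world!"}
    // An image content would encode as:
    // {"type":"image_url","image_url":{"url":"https://example.com/cat.png"}}
} catch {
    print("Encoding failed: \(error)")
}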
1 change: 1 addition & 0 deletions Sources/OpenAI/Public/Models/Chat/ChatFunction.swift
@@ -7,6 +7,7 @@

import Foundation

/// Only available for **ASSISTANT** user type.
public struct ChatFunctionCall: Codable, Equatable {
/// The name of the function to call.
public let name: String?
Sources/OpenAI/Public/Models/Chat/ChatQuery+.swift
@@ -1,5 +1,5 @@
//
// File.swift
// ChatQuery+.swift
//
//
// Created by Federico Vitale on 14/11/23.
@@ -28,8 +28,8 @@ extension ChatQuery {
case function(String)

enum CodingKeys: String, CodingKey {
case none = "none"
case auto = "auto"
case none
case auto
case function = "name"
}

8 changes: 4 additions & 4 deletions Sources/OpenAI/Public/Models/Chat/ChatQuery.swift
@@ -22,7 +22,7 @@ public struct ChatQuery: Equatable, Codable, Streamable {
/// A list of functions the model may generate JSON inputs for.
public let functions: [ChatFunctionDeclaration]?

public let tools: [Tool]?
public let tools: [ChatTool]?

/// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present.
public let functionCall: FunctionCall?
@@ -53,7 +53,7 @@ public struct ChatQuery: Equatable, Codable, Streamable {
///
/// `none` is default when no functions are present
/// `auto` is default if functions are present
public let toolChoice: AnyOf<ToolChoice, Tool.ToolValue>?
public let toolChoice: AnyOf<ToolChoice, ChatTool.ToolValue>?

var stream: Bool = false

@@ -82,8 +82,8 @@ public struct ChatQuery: Equatable, Codable, Streamable {
model: Model,
messages: [Message],
responseFormat: ResponseFormat? = nil,
tools: [Tool]? = nil,
toolChoice: AnyOf<ToolChoice, Tool.ToolValue>? = nil,
tools: [ChatTool]? = nil,
toolChoice: AnyOf<ToolChoice, ChatTool.ToolValue>? = nil,
functions: [ChatFunctionDeclaration]? = nil,
functionCall: FunctionCall? = nil,
temperature: Double? = nil,
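For reference, a minimal sketch of a call site picking up the rename to ChatTool in the tools: parameter. The function declaration mirrors the decoder test further down; the .gpt3_5Turbo model constant, the .user role, the JSONSchema.Property initializer, and passing a plain String as the message content are assumptions about the library's existing API, not part of this diff, and the ChatTool initializer shown above is not marked public, so this sketch assumes package-internal (or @testable) context, as in the tests:

import OpenAI

// Sketch only: a ChatQuery using the renamed `tools: [ChatTool]` parameter.
let weatherTool = ChatTool(type: .function, value: .function(.init(
    name: "get_current_weather",
    description: "Returns the current weather for a city",
    parameters: .init(type: .object, properties: [
        "location": .init(type: .string, description: "The city name") // assumed Property initializer
    ])
)))

let query = ChatQuery(
    model: .gpt3_5Turbo,   // assumed model constant
    messages: [Message(role: .user, content: "What's the weather in Kyiv?")],
    tools: [weatherTool]
)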
24 changes: 11 additions & 13 deletions Sources/OpenAI/Public/Models/Chat/ChatTool.swift
@@ -1,17 +1,21 @@
//
// File.swift
//
// ChatTool.swift
//
//
// Created by Federico Vitale on 14/11/23.
//

import Foundation

public struct Tool: Codable, Equatable {
/// The type of the tool.
public struct ChatTool: Codable, Equatable {
let type: ToolType
let value: ToolValue

enum CodingKeys: CodingKey {
case type
case value
}

init(type: ToolType, value: ToolValue) {
self.type = type
self.value = value
@@ -30,27 +34,21 @@ public struct Tool: Codable, Equatable {
}
}


enum CodingKeys: CodingKey {
case type
case value
}

public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
let dynamicContainer = try decoder.container(keyedBy: DynamicKey.self)
self.type = try container.decode(Tool.ToolType.self, forKey: .type)
self.type = try container.decode(ChatTool.ToolType.self, forKey: .type)

switch self.type {
case .function:
self.value = try dynamicContainer.decode(Tool.ToolValue.self, forKey: .init(stringValue: "function"))
self.value = try dynamicContainer.decode(ChatTool.ToolValue.self, forKey: .init(stringValue: "function"))
break
}
}
}


extension Tool {
extension ChatTool {
public enum ToolType: String, Codable {
case function
}
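And the wire format the renamed ChatTool decodes from, as a hedged sketch: the outer type/function keys follow the custom init(from:) above, while the nested field names assume the standard OpenAI tool payload and the ChatFunctionDeclaration coding keys, which are not shown in this diff:

import Foundation
import OpenAI

// Sketch only: decode a tool payload into the renamed ChatTool.
let json = """
{
  "type": "function",
  "function": {
    "name": "get_current_weather",
    "description": "Returns the current weather for a city",
    "parameters": { "type": "object" }
  }
}
"""

do {
    let tool = try JSONDecoder().decode(ChatTool.self, from: Data(json.utf8))
    print(tool) // expected: a .function tool wrapping the declared function
} catch {
    print("Decoding failed: \(error)")
}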
12 changes: 6 additions & 6 deletions Sources/OpenAI/Public/Models/Chat/JSONSchema.swift
@@ -71,12 +71,12 @@ public struct JSONSchema: Codable, Equatable {
}

public enum JSONType: String, Codable {
case integer = "integer"
case string = "string"
case boolean = "boolean"
case array = "array"
case object = "object"
case number = "number"
case integer
case string
case boolean
case array
case object
case number
case `null` = "null"
}

4 changes: 2 additions & 2 deletions Sources/OpenAI/Public/Models/Chat/Message.swift
@@ -1,5 +1,5 @@
//
// File.swift
// Message.swift
//
//
// Created by Federico Vitale on 14/11/23.
@@ -32,7 +32,7 @@ public struct Message: Codable, Equatable {
case functionCall = "function_call"
}

public init(role: Role, content codable: StringOrChatContent? = nil ,name: String? = nil, functionCall: ChatFunctionCall? = nil) {
public init(role: Role, content codable: StringOrChatContent? = nil, name: String? = nil, functionCall: ChatFunctionCall? = nil) {
let stringOrCodable: StringOrCodable<[ChatContent]>?

if let string = codable as? String {
3 changes: 3 additions & 0 deletions Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift
@@ -12,6 +12,8 @@ import Foundation
@available(tvOS 13.0, *)
@available(watchOS 6.0, *)
public extension OpenAIProtocol {

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func completions(
query: CompletionsQuery
) async throws -> CompletionsResult {
@@ -27,6 +29,7 @@ public extension OpenAIProtocol {
}
}

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chatsStream instead.")
func completionsStream(
query: CompletionsQuery
) -> AsyncThrowingStream<CompletionsResult, Error> {
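The deprecation messages steer callers to the chat endpoints. A minimal migration sketch for the async variant, assuming the library's existing chats(query:) method and ChatResult type plus the assumed .gpt3_5Turbo and .user constants, none of which appear in this diff:

import OpenAI

// Sketch only: the suggested replacement for a deprecated completions(query:) call.
func askChat(_ openAI: OpenAIProtocol) async throws -> ChatResult {
    let query = ChatQuery(
        model: .gpt3_5Turbo,   // assumed model constant
        messages: [Message(role: .user, content: "What is 42?")]
    )
    return try await openAI.chats(query: query)
}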
2 changes: 2 additions & 0 deletions Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift
@@ -15,13 +15,15 @@ import Combine
@available(watchOS 6.0, *)
public extension OpenAIProtocol {

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func completions(query: CompletionsQuery) -> AnyPublisher<CompletionsResult, Error> {
Future<CompletionsResult, Error> {
completions(query: query, completion: $0)
}
.eraseToAnyPublisher()
}

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func completionsStream(query: CompletionsQuery) -> AnyPublisher<Result<CompletionsResult, Error>, Error> {
let progress = PassthroughSubject<Result<CompletionsResult, Error>, Error>()
completionsStream(query: query) { result in
8 changes: 5 additions & 3 deletions Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift
@@ -23,7 +23,8 @@ public protocol OpenAIProtocol {
- Parameters:
- query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.
- completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<CompletionsResult, Error>`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed.
**/
*/
@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func completions(query: CompletionsQuery, completion: @escaping (Result<CompletionsResult, Error>) -> Void)

/**
@@ -41,7 +42,8 @@
- query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.
- onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<CompletionsResult, Error>`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed.
- completion: A closure that is called when all chunks are delivered or an unrecoverable error occurred
**/
*/
@available(*, deprecated, message: "CompletionsStream is now marked 'Legacy' in OpenAI API, use chatsStream instead.")
func completionsStream(query: CompletionsQuery, onResult: @escaping (Result<CompletionsResult, Error>) -> Void, completion: ((Error?) -> Void)?)

/**
@@ -58,7 +60,7 @@
- Parameters:
- query: An `ImagesQuery` object containing the input parameters for the API request. This includes the query parameters such as the model, text prompt, image size, and other settings.
- completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ImagesResult, Error>`, will contain either the `ImagesResult` object with the generated images, or an error if the request failed.
**/
*/
func images(query: ImagesQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)

/**
2 changes: 2 additions & 0 deletions Tests/OpenAITests/OpenAITests.swift
@@ -23,6 +23,7 @@ class OpenAITests: XCTestCase {
self.openAI = OpenAI(configuration: configuration, session: self.urlSession)
}

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func testCompletions() async throws {
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
let expectedResult = CompletionsResult(id: "foo", object: "bar", created: 100500, model: .babbage, choices: [
@@ -34,6 +35,7 @@ class OpenAITests: XCTestCase {
XCTAssertEqual(result, expectedResult)
}

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func testCompletionsAPIError() async throws {
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100")
1 change: 1 addition & 0 deletions Tests/OpenAITests/OpenAITestsCombine.swift
@@ -25,6 +25,7 @@ final class OpenAITestsCombine: XCTestCase {
self.openAI = OpenAI(configuration: configuration, session: self.urlSession)
}

@available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.")
func testCompletions() throws {
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
let expectedResult = CompletionsResult(id: "foo", object: "bar", created: 100500, model: .babbage, choices: [
2 changes: 1 addition & 1 deletion Tests/OpenAITests/OpenAITestsDecoder.swift
@@ -462,7 +462,7 @@ class OpenAITestsDecoder: XCTestCase {
}
"""

let value = Tool(type: .function, value: .function(.init(
let value = ChatTool(type: .function, value: .function(.init(
name: "test_name",
description: "test_desc",
parameters: .init(type: .object, properties: [