From facca4ae9f830b578bd7e4843d2fb5b48f275a2c Mon Sep 17 00:00:00 2001 From: "bartlomiej.zylinski" Date: Thu, 16 May 2024 11:33:47 +0200 Subject: [PATCH 1/4] Apply most of v2 changes --- README.md | 6 +- core/src/main/scala/sttp/openai/OpenAI.scala | 230 ++++++------------ .../scala/sttp/openai/OpenAISyncClient.scala | 156 ++++-------- .../assistants/AssistantsRequestBody.scala | 36 +-- .../assistants/AssistantsResponseData.scala | 77 +----- .../completions/chat/ChatRequestBody.scala | 5 +- .../completions/chat/message/Attachment.scala | 26 ++ .../completions/chat/message/Tool.scala | 12 +- .../chat/message/ToolResource.scala | 63 +++++ .../chat/message/ToolResources.scala | 40 +++ .../requests/files/FilesResponseData.scala | 4 +- .../requests/threads/ThreadsRequestBody.scala | 2 + .../messages/ThreadMessagesRequestBody.scala | 8 +- .../messages/ThreadMessagesResponseData.scala | 55 +---- .../threads/runs/ThreadRunsRequestBody.scala | 3 +- .../threads/runs/ThreadRunsResponseData.scala | 29 +-- .../requests/vectorstore/ExpiresAfter.scala | 15 ++ .../vectorstore/VectorStoreRequestBody.scala | 36 +++ .../vectorstore/VectorStoreResponseData.scala | 111 +++++++++ .../vectorstore/file/FileStatus.scala | 31 +++ .../file/VectorStoreFileRequestBody.scala | 34 +++ .../file/VectorStoreFileResponseData.scala | 80 ++++++ .../openai/fixtures/AssistantsFixture.scala | 74 ++---- .../fixtures/ThreadMessagesFixture.scala | 50 +--- .../openai/fixtures/ThreadRunsFixture.scala | 53 ++-- .../sttp/openai/fixtures/ThreadsFixture.scala | 29 ++- .../fixtures/VectorStoreFileFixture.scala | 34 +++ .../openai/fixtures/VectorStoreFixture.scala | 41 ++++ .../assistants/AssistantsDataSpec.scala | 109 ++------- .../requests/threads/ThreadsDataSpec.scala | 33 ++- .../messages/ThreadMessagesDataSpec.scala | 70 +----- .../threads/runs/ThreadRunsDataSpec.scala | 25 +- .../vectorstore/VectorStoreDataSpec.scala | 70 ++++++ .../file/VectorStoreFileDataSpec.scala | 68 ++++++ 34 files changed, 993 insertions(+), 722 deletions(-) create mode 100644 core/src/main/scala/sttp/openai/requests/completions/chat/message/Attachment.scala create mode 100644 core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResource.scala create mode 100644 core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResources.scala create mode 100644 core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala create mode 100644 core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala create mode 100644 core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala create mode 100644 core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala create mode 100644 core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala create mode 100644 core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala create mode 100644 core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala create mode 100644 core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala create mode 100644 core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala create mode 100644 core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala diff --git a/README.md b/README.md index 6a6442d8..792529aa 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ sttp is a family of Scala HTTP-related projects, and currently includes: * [sttp 
client](https://github.com/softwaremill/sttp): The Scala HTTP client you always wanted! * [sttp tapir](https://github.com/softwaremill/tapir): Typed API descRiptions -* sttp openai: this project. Scala client wrapper for OpenAI (and OpenAI-compatible) API. Use the power of ChatGPT inside your code! +* sttp openai: this project. Non-official Scala client wrapper for OpenAI (and OpenAI-compatible) API. Use the power of ChatGPT inside your code! ## Intro Sttp-openai uses sttp client to describe requests and responses used in OpenAI (and OpenAI-compatible) endpoints. @@ -40,7 +40,9 @@ import sttp.openai.requests.completions.chat.message._ object Main extends App { // Create an instance of OpenAISyncClient providing your API secret-key - val openAI: OpenAISyncClient = OpenAISyncClient("your-secret-key") + private val apiKey = System.getProperty("openai-key") + + val openAI: OpenAISyncClient = OpenAISyncClient(apiKey) // Create body of Chat Completions Request val bodyMessages: Seq[Message] = Seq( diff --git a/core/src/main/scala/sttp/openai/OpenAI.scala b/core/src/main/scala/sttp/openai/OpenAI.scala index ce1d8c7d..07fd52b3 100644 --- a/core/src/main/scala/sttp/openai/OpenAI.scala +++ b/core/src/main/scala/sttp/openai/OpenAI.scala @@ -4,15 +4,8 @@ import sttp.client4._ import sttp.model.{Header, Uri} import sttp.openai.OpenAIExceptions.OpenAIException import sttp.openai.json.SttpUpickleApiExtension.{asJsonSnake, asStreamSnake, asStringEither, upickleBodySerializer} -import sttp.openai.requests.assistants.AssistantsRequestBody.{CreateAssistantBody, CreateAssistantFileBody, ModifyAssistantBody} -import sttp.openai.requests.assistants.AssistantsResponseData.{ - AssistantData, - AssistantFileData, - DeleteAssistantFileResponse, - DeleteAssistantResponse, - ListAssistantFilesResponse, - ListAssistantsResponse -} +import sttp.openai.requests.assistants.AssistantsRequestBody.{CreateAssistantBody, ModifyAssistantBody} +import sttp.openai.requests.assistants.AssistantsResponseData.{AssistantData, DeleteAssistantResponse, ListAssistantsResponse} import sttp.openai.requests.completions.CompletionsRequestBody.CompletionsBody import sttp.openai.requests.completions.CompletionsResponseData.CompletionsResponse import sttp.openai.requests.completions.chat.ChatRequestBody.ChatBody @@ -23,12 +16,7 @@ import sttp.openai.requests.embeddings.EmbeddingsRequestBody.EmbeddingsBody import sttp.openai.requests.embeddings.EmbeddingsResponseBody.EmbeddingResponse import sttp.openai.requests.files.FilesResponseData._ import sttp.openai.requests.finetunes.FineTunesRequestBody -import sttp.openai.requests.finetunes.FineTunesResponseData.{ - DeleteFineTuneModelResponse, - FineTuneEventsResponse, - FineTuneResponse, - GetFineTunesResponse -} +import sttp.openai.requests.finetunes.FineTunesResponseData.{DeleteFineTuneModelResponse, FineTuneEventsResponse, FineTuneResponse, GetFineTunesResponse} import sttp.openai.requests.images.ImageResponseData.ImageResponse import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody import sttp.openai.requests.images.edit.ImageEditsConfig @@ -44,21 +32,14 @@ import sttp.capabilities.Streams import sttp.openai.requests.threads.ThreadsRequestBody.CreateThreadBody import sttp.openai.requests.threads.ThreadsResponseData.{DeleteThreadResponse, ThreadData} import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage -import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ - ListMessageFilesResponse, - ListMessagesResponse, - 
MessageData, - MessageFileData -} -import sttp.openai.requests.threads.runs.ThreadRunsRequestBody.{ - CreateRun, - CreateThreadAndRun, - ModifyRun, - SubmitToolOutputsToRun, - ToolOutput -} +import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ListMessagesResponse, MessageData} +import sttp.openai.requests.threads.runs.ThreadRunsRequestBody.{CreateRun, CreateThreadAndRun, ModifyRun, SubmitToolOutputsToRun, ToolOutput} import sttp.openai.requests.threads.runs.ThreadRunsResponseData.{ListRunStepsResponse, ListRunsResponse, RunData, RunStepData} import sttp.openai.requests.threads.QueryParameters +import sttp.openai.requests.vectorstore.VectorStoreRequestBody.{CreateVectorStoreBody, ModifyVectorStoreBody} +import sttp.openai.requests.vectorstore.VectorStoreResponseData.{DeleteVectorStoreResponse, ListVectorStoresResponse, VectorStore} +import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.{CreateVectorStoreFileBody, ListVectorStoreFilesBody} +import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{DeleteVectorStoreFileResponse, ListVectorStoreFilesResponse, VectorStoreFile} import java.io.File import java.nio.file.Paths @@ -715,30 +696,6 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .response(asJsonSnake[ListMessagesResponse]) } - /** Returns a list of message files. - * - * [[https://platform.openai.com/docs/api-reference/messages/listMessageFiles]] - * - * @param threadId - * The ID of the thread that the message and files belong to. - * - * @param messageId - * The ID of the message that the files belongs to. - */ - def listThreadMessageFiles( - threadId: String, - messageId: String, - queryParameters: QueryParameters = QueryParameters.empty - ): Request[Either[OpenAIException, ListMessageFilesResponse]] = { - val uri = openAIUris - .threadMessageFiles(threadId, messageId) - .withParams(queryParameters.toMap) - - betaOpenAIAuthRequest - .get(uri) - .response(asJsonSnake[ListMessageFilesResponse]) - } - /** Retrieve a message. * * [[https://platform.openai.com/docs/api-reference/messages/getMessage]] @@ -757,28 +714,6 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .get(openAIUris.threadMessage(threadId, messageId)) .response(asJsonSnake[MessageData]) - /** Retrieves a message file. - * - * [[https://platform.openai.com/docs/api-reference/messages/getMessageFile]] - * - * @param threadId - * The ID of the thread to which the message and File belong. - * - * @param messageId - * The ID of the message the file belongs to. - * - * @param fileId - * The ID of the file being retrieved. - */ - def retrieveThreadMessageFile( - threadId: String, - messageId: String, - fileId: String - ): Request[Either[OpenAIException, MessageFileData]] = - betaOpenAIAuthRequest - .get(openAIUris.threadMessageFile(threadId, messageId, fileId)) - .response(asJsonSnake[MessageFileData]) - /** Modifies a message. * * [[https://platform.openai.com/docs/api-reference/messages/modifyMessage]] @@ -808,23 +743,6 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .body(createAssistantBody) .response(asJsonSnake[AssistantData]) - /** Create an assistant file by attaching a File to an assistant. - * - * [[https://platform.openai.com/docs/api-reference/assistants/createAssistantFile]] - * - * @param assistantId - * The ID of the assistant for which to create a File. - * - * @param fileId - * A File ID (with purpose="assistants") that the assistant should use. 
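A note on the README change near the top of this patch: the example now reads the API key from the `openai-key` JVM system property instead of hard-coding it. A minimal sketch of supplying that value — the environment-variable fallback and its `OPENAI_KEY` name are only an illustration:

```scala
// run with e.g. `java -Dopenai-key=sk-... -jar app.jar`, or resolve the key with a fallback
val apiKey: String = Option(System.getProperty("openai-key"))
  .orElse(sys.env.get("OPENAI_KEY"))
  .getOrElse(sys.error("openai-key is not set"))
```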
Useful for tools like retrieval and code_interpreter that can - * access files.. - */ - def createAssistantFile(assistantId: String, fileId: String): Request[Either[OpenAIException, AssistantFileData]] = - betaOpenAIAuthRequest - .post(openAIUris.assistantFiles(assistantId)) - .body(CreateAssistantFileBody(fileId)) - .response(asJsonSnake[AssistantFileData]) - /** Returns a list of assistants. * * [[https://platform.openai.com/docs/api-reference/assistants/listAssistants]] @@ -840,26 +758,6 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .response(asJsonSnake[ListAssistantsResponse]) } - /** Returns a list of assistant files. - * - * [[https://platform.openai.com/docs/api-reference/assistants/listAssistantFiles]] - * - * @param assistantId - * The ID of the assistant the file belongs to. - */ - def listAssistantFiles( - assistantId: String, - queryParameters: QueryParameters = QueryParameters.empty - ): Request[Either[OpenAIException, ListAssistantFilesResponse]] = { - val uri = openAIUris - .assistantFiles(assistantId) - .withParams(queryParameters.toMap) - - betaOpenAIAuthRequest - .get(uri) - .response(asJsonSnake[ListAssistantFilesResponse]) - } - /** Retrieves an assistant. * * [[https://platform.openai.com/docs/api-reference/assistants/getAssistant]] @@ -872,21 +770,6 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .get(openAIUris.assistant(assistantId)) .response(asJsonSnake[AssistantData]) - /** Retrieves an AssistantFile. - * - * [[https://platform.openai.com/docs/api-reference/assistants/getAssistantFile]] - * - * @param assistantId - * The ID of the assistant who the file belongs to. - * - * @param fileId - * The ID of the file we're getting. - */ - def retrieveAssistantFile(assistantId: String, fileId: String): Request[Either[OpenAIException, AssistantFileData]] = - betaOpenAIAuthRequest - .get(openAIUris.assistantFile(assistantId, fileId)) - .response(asJsonSnake[AssistantFileData]) - /** Modifies an assistant. * * [[https://platform.openai.com/docs/api-reference/assistants/modifyAssistant]] @@ -915,21 +798,6 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .delete(openAIUris.assistant(assistantId)) .response(asJsonSnake[DeleteAssistantResponse]) - /** Delete an assistant file. - * - * [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistantFile]] - * - * @param assistantId - * The ID of the assistant that the file belongs to. - * - * @param fileId - * The ID of the file to delete. - */ - def deleteAssistantFile(assistantId: String, fileId: String): Request[Either[OpenAIException, DeleteAssistantFileResponse]] = - betaOpenAIAuthRequest - .delete(openAIUris.assistantFile(assistantId, fileId)) - .response(asJsonSnake[DeleteAssistantFileResponse]) - /** Create a run. * * [[https://platform.openai.com/docs/api-reference/runs/createRun]] @@ -1060,7 +928,7 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .post(openAIUris.threadRunSubmitToolOutputs(threadId, runId)) .body(SubmitToolOutputsToRun(toolOutputs)) .response(asJsonSnake[RunData]) -// + /** Cancels a run that is in_progress. 
* * [[https://platform.openai.com/docs/api-reference/runs/cancelRun]] @@ -1076,12 +944,70 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .post(openAIUris.threadRunCancel(threadId, runId)) .response(asJsonSnake[RunData]) + def createVectorStore(createVectorStoreBody: CreateVectorStoreBody): Request[Either[OpenAIException, VectorStore]] = + betaOpenAIAuthRequest + .post(openAIUris.VectorStores) + .body(createVectorStoreBody) + .response(asJsonSnake[VectorStore]) + + def listVectorStores( + queryParameters: QueryParameters = QueryParameters.empty + ): Request[Either[OpenAIException, ListVectorStoresResponse]] = + betaOpenAIAuthRequest + .get(openAIUris.VectorStores.withParams(queryParameters.toMap)) + .response(asJsonSnake[ListVectorStoresResponse]) + + def retrieveVectorStore(vectorStoreId: String): Request[Either[OpenAIException, VectorStore]] = + betaOpenAIAuthRequest + .get(openAIUris.vectorStore(vectorStoreId)) + .response(asJsonSnake[VectorStore]) + + def modifyVectorStore( + vectorStoreId: String, + modifyVectorStoreBody: ModifyVectorStoreBody + ): Request[Either[OpenAIException, VectorStore]] = + betaOpenAIAuthRequest + .post(openAIUris.vectorStore(vectorStoreId)) + .body(modifyVectorStoreBody) + .response(asJsonSnake[VectorStore]) + + def deleteVectorStore(vectorStoreId: String): Request[Either[OpenAIException, DeleteVectorStoreResponse]] = + betaOpenAIAuthRequest + .delete(openAIUris.vectorStore(vectorStoreId)) + .response(asJsonSnake[DeleteVectorStoreResponse]) + + def createVectorStoreFile( + vectorStoreId: String, + createVectorStoreFileBody: CreateVectorStoreFileBody + ): Request[Either[OpenAIException, VectorStoreFile]] = + betaOpenAIAuthRequest + .post(openAIUris.vectorStoreFiles(vectorStoreId)) + .body(createVectorStoreFileBody) + .response(asJsonSnake[VectorStoreFile]) + + def listVectorStoreFiles( + vectorStoreId: String, + queryParameters: ListVectorStoreFilesBody = ListVectorStoreFilesBody() + ): Request[Either[OpenAIException, ListVectorStoreFilesResponse]] = + betaOpenAIAuthRequest + .get(openAIUris.vectorStoreFiles(vectorStoreId).withParams(queryParameters.toMap)) + .response(asJsonSnake[ListVectorStoreFilesResponse]) + + def retrieveVectorStoreFile(vectorStoreId: String, fileId: String): Request[Either[OpenAIException, VectorStoreFile]] = + betaOpenAIAuthRequest + .get(openAIUris.vectorStoreFile(vectorStoreId, fileId)) + .response(asJsonSnake[VectorStoreFile]) + + def deleteVectorStoreFile(vectorStoreId: String, fileId: String): Request[Either[OpenAIException, DeleteVectorStoreFileResponse]] = + betaOpenAIAuthRequest + .delete(openAIUris.vectorStoreFile(vectorStoreId, fileId)) + .response(asJsonSnake[DeleteVectorStoreFileResponse]) + protected val openAIAuthRequest: PartialRequest[Either[String, String]] = basicRequest.auth .bearer(authToken) protected val betaOpenAIAuthRequest: PartialRequest[Either[String, String]] = - openAIAuthRequest.withHeaders(openAIAuthRequest.headers :+ Header("OpenAI-Beta", "assistants=v1")) - + openAIAuthRequest.withHeaders(openAIAuthRequest.headers :+ Header("OpenAI-Beta", "assistants=v2")) } private class OpenAIUris(val baseUri: Uri) { @@ -1105,6 +1031,7 @@ private class OpenAIUris(val baseUri: Uri) { val Assistants: Uri = uri"$baseUri/assistants" val Threads: Uri = uri"$baseUri/threads" val ThreadsRuns: Uri = uri"$baseUri/threads/runs" + val VectorStores: Uri = uri"$baseUri/vector_stores" def cancelFineTune(fineTuneId: String): Uri = FineTunes.addPath(fineTuneId, "cancel") def file(fileId: String): Uri = 
Files.addPath(fileId) @@ -1115,19 +1042,10 @@ private class OpenAIUris(val baseUri: Uri) { def model(modelId: String): Uri = Models.addPath(modelId) def assistant(assistantId: String): Uri = Assistants.addPath(assistantId) - def assistantFiles(assistantId: String): Uri = Assistants.addPath(assistantId).addPath("files") - def assistantFile(assistantId: String, fileId: String): Uri = Assistants.addPath(assistantId).addPath("files").addPath(fileId) def thread(threadId: String): Uri = Threads.addPath(threadId) - def threadMessages(threadId: String): Uri = Threads.addPath(threadId).addPath("messages") def threadMessage(threadId: String, messageId: String): Uri = Threads.addPath(threadId).addPath("messages").addPath(messageId) - - def threadMessageFiles(threadId: String, messageId: String): Uri = - Threads.addPath(threadId).addPath("messages", messageId, "files") - def threadMessageFile(threadId: String, messageId: String, fileId: String): Uri = - Threads.addPath(threadId).addPath("messages", messageId, "files", fileId) - def threadRuns(threadId: String): Uri = Threads.addPath(threadId, "runs") def threadRun(threadId: String, runId: String): Uri = Threads.addPath(threadId, "runs", runId) @@ -1141,6 +1059,12 @@ private class OpenAIUris(val baseUri: Uri) { def threadRunSubmitToolOutputs(threadId: String, runId: String): Uri = Threads.addPath(threadId, "runs", runId, "submit_tool_outputs") + def vectorStore(vectorStoreId: String): Uri = + VectorStores.addPath(vectorStoreId) + def vectorStoreFiles(vectorStoreId: String): Uri = + vectorStore(vectorStoreId).addPath("files") + def vectorStoreFile(vectorStoreId: String, fileId: String): Uri = + vectorStoreFiles(vectorStoreId).addPath(fileId) } object OpenAIUris { diff --git a/core/src/main/scala/sttp/openai/OpenAISyncClient.scala b/core/src/main/scala/sttp/openai/OpenAISyncClient.scala index 6dc52ea8..5821fc5c 100644 --- a/core/src/main/scala/sttp/openai/OpenAISyncClient.scala +++ b/core/src/main/scala/sttp/openai/OpenAISyncClient.scala @@ -1,16 +1,10 @@ package sttp.openai import sttp.client4.{DefaultSyncBackend, Request, SyncBackend} +import sttp.model.Uri import sttp.openai.OpenAIExceptions.OpenAIException import sttp.openai.requests.assistants.AssistantsRequestBody.{CreateAssistantBody, ModifyAssistantBody} -import sttp.openai.requests.assistants.AssistantsResponseData.{ - AssistantData, - AssistantFileData, - DeleteAssistantFileResponse, - DeleteAssistantResponse, - ListAssistantFilesResponse, - ListAssistantsResponse -} +import sttp.openai.requests.assistants.AssistantsResponseData.{AssistantData, DeleteAssistantResponse, ListAssistantsResponse} import sttp.openai.requests.audio.AudioResponseData.AudioResponse import sttp.openai.requests.audio.RecognitionModel import sttp.openai.requests.audio.transcriptions.TranscriptionConfig @@ -38,21 +32,23 @@ import sttp.openai.requests.images.variations.ImageVariationsConfig import sttp.openai.requests.models.ModelsResponseData.{ModelData, ModelsResponse} import sttp.openai.requests.moderations.ModerationsRequestBody.ModerationsBody import sttp.openai.requests.moderations.ModerationsResponseData.ModerationData +import sttp.openai.requests.threads.QueryParameters import sttp.openai.requests.threads.ThreadsRequestBody.CreateThreadBody import sttp.openai.requests.threads.ThreadsResponseData.{DeleteThreadResponse, ThreadData} import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage -import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ - ListMessageFilesResponse, - 
ListMessagesResponse, - MessageData, - MessageFileData -} -import sttp.openai.requests.threads.QueryParameters +import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ListMessagesResponse, MessageData} import sttp.openai.requests.threads.runs.ThreadRunsRequestBody.{CreateRun, CreateThreadAndRun, ToolOutput} import sttp.openai.requests.threads.runs.ThreadRunsResponseData.{ListRunStepsResponse, ListRunsResponse, RunData, RunStepData} +import sttp.openai.requests.vectorstore.VectorStoreRequestBody.{CreateVectorStoreBody, ModifyVectorStoreBody} +import sttp.openai.requests.vectorstore.VectorStoreResponseData.{DeleteVectorStoreResponse, ListVectorStoresResponse, VectorStore} +import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.{CreateVectorStoreFileBody, ListVectorStoreFilesBody} +import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{ + DeleteVectorStoreFileResponse, + ListVectorStoreFilesResponse, + VectorStoreFile +} import java.io.File -import sttp.model.Uri class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeClient: Boolean, baseUri: Uri) { @@ -495,23 +491,6 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl ): ListMessagesResponse = sendOrThrow(openAI.listThreadMessages(threadId, queryParameters)) - /** Returns a list of message files. - * - * [[https://platform.openai.com/docs/api-reference/messages/listMessageFiles]] - * - * @param threadId - * The ID of the thread that the message and files belong to. - * - * @param messageId - * The ID of the message that the files belongs to. - */ - def listThreadMessageFiles( - threadId: String, - messageId: String, - queryParameters: QueryParameters = QueryParameters.empty - ): ListMessageFilesResponse = - sendOrThrow(openAI.listThreadMessageFiles(threadId, messageId, queryParameters)) - /** Retrieve a message. * * [[https://platform.openai.com/docs/api-reference/messages/getMessage]] @@ -528,26 +507,6 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl ): MessageData = sendOrThrow(openAI.retrieveThreadMessage(threadId, messageId)) - /** Retrieves a message file. - * - * [[https://platform.openai.com/docs/api-reference/messages/getMessageFile]] - * - * @param threadId - * The ID of the thread to which the message and File belong. - * - * @param messageId - * The ID of the message the file belongs to. - * - * @param fileId - * The ID of the file being retrieved. - */ - def retrieveThreadMessageFile( - threadId: String, - messageId: String, - fileId: String - ): MessageFileData = - sendOrThrow(openAI.retrieveThreadMessageFile(threadId, messageId, fileId)) - /** Modifies a message. * * [[https://platform.openai.com/docs/api-reference/messages/modifyMessage]] @@ -571,20 +530,6 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl def createAssistant(createAssistantBody: CreateAssistantBody): AssistantData = sendOrThrow(openAI.createAssistant(createAssistantBody)) - /** Create an assistant file by attaching a File to an assistant. - * - * [[https://platform.openai.com/docs/api-reference/assistants/createAssistantFile]] - * - * @param assistantId - * The ID of the assistant for which to create a File. - * - * @param fileId - * A File ID (with purpose="assistants") that the assistant should use. Useful for tools like retrieval and code_interpreter that can - * access files.. 
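The vector-store endpoints added to `OpenAI.scala` earlier in this patch return plain sttp requests, so they compose with any backend. A minimal sketch of sending one — the backend choice, secret key and store name are illustrative:

```scala
import sttp.client4.DefaultSyncBackend
import sttp.openai.OpenAI
import sttp.openai.requests.vectorstore.VectorStoreRequestBody.CreateVectorStoreBody

val backend = DefaultSyncBackend()
val openAI = new OpenAI("your-secret-key")

// POST /vector_stores with an optional name; the response body is decoded into VectorStore
val result = openAI
  .createVectorStore(CreateVectorStoreBody(name = Some("support-docs")))
  .send(backend)

result.body match {
  case Right(store) => println(s"created ${store.id}")
  case Left(error)  => println(s"request failed: $error")
}
```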
- */ - def createAssistantFile(assistantId: String, fileId: String): AssistantFileData = - sendOrThrow(openAI.createAssistantFile(assistantId, fileId)) - /** Returns a list of assistants. * * [[https://platform.openai.com/docs/api-reference/assistants/listAssistants]] @@ -594,19 +539,6 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl ): ListAssistantsResponse = sendOrThrow(openAI.listAssistants(queryParameters)) - /** Returns a list of assistant files. - * - * [[https://platform.openai.com/docs/api-reference/assistants/listAssistantFiles]] - * - * @param assistantId - * The ID of the assistant the file belongs to. - */ - def listAssistantFiles( - assistantId: String, - queryParameters: QueryParameters = QueryParameters.empty - ): ListAssistantFilesResponse = - sendOrThrow(openAI.listAssistantFiles(assistantId, queryParameters)) - /** Retrieves an assistant. * * [[https://platform.openai.com/docs/api-reference/assistants/getAssistant]] @@ -617,19 +549,6 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl def retrieveAssistant(assistantId: String): AssistantData = sendOrThrow(openAI.retrieveAssistant(assistantId)) - /** Retrieves an AssistantFile. - * - * [[https://platform.openai.com/docs/api-reference/assistants/getAssistantFile]] - * - * @param assistantId - * The ID of the assistant who the file belongs to. - * - * @param fileId - * The ID of the file we're getting. - */ - def retrieveAssistantFile(assistantId: String, fileId: String): AssistantFileData = - sendOrThrow(openAI.retrieveAssistantFile(assistantId, fileId)) - /** Modifies an assistant. * * [[https://platform.openai.com/docs/api-reference/assistants/modifyAssistant]] @@ -652,19 +571,6 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl def deleteAssistant(assistantId: String): DeleteAssistantResponse = sendOrThrow(openAI.deleteAssistant(assistantId)) - /** Delete an assistant file. - * - * [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistantFile]] - * - * @param assistantId - * The ID of the assistant that the file belongs to. - * - * @param fileId - * The ID of the file to delete. - */ - def deleteAssistantFile(assistantId: String, fileId: String): DeleteAssistantFileResponse = - sendOrThrow(openAI.deleteAssistantFile(assistantId, fileId)) - /** Create a run. 
* * [[https://platform.openai.com/docs/api-reference/runs/createRun]] @@ -784,6 +690,44 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl def cancelRun(threadId: String, runId: String): RunData = sendOrThrow(openAI.cancelRun(threadId, runId)) + def createVectorStore(createVectorStoreBody: CreateVectorStoreBody): VectorStore = + sendOrThrow(openAI.createVectorStore(createVectorStoreBody)) + + def listVectorStores( + queryParameters: QueryParameters = QueryParameters.empty + ): ListVectorStoresResponse = + sendOrThrow(openAI.listVectorStores(queryParameters)) + + def retrieveVectorStore(vectorStoreId: String): VectorStore = + sendOrThrow(openAI.retrieveVectorStore(vectorStoreId)) + + def modifyVectorStore( + vectorStoreId: String, + modifyVectorStoreBody: ModifyVectorStoreBody + ): VectorStore = + sendOrThrow(openAI.modifyVectorStore(vectorStoreId, modifyVectorStoreBody)) + + def deleteVectorStore(vectorStoreId: String): DeleteVectorStoreResponse = + sendOrThrow(openAI.deleteVectorStore(vectorStoreId)) + + def createVectorStoreFile( + vectorStoreId: String, + createVectorStoreFileBody: CreateVectorStoreFileBody + ): VectorStoreFile = + sendOrThrow(openAI.createVectorStoreFile(vectorStoreId, createVectorStoreFileBody)) + + def listVectorStoreFiles( + vectorStoreId: String, + queryParameters: ListVectorStoreFilesBody = ListVectorStoreFilesBody() + ): ListVectorStoreFilesResponse = + sendOrThrow(openAI.listVectorStoreFiles(vectorStoreId, queryParameters)) + + def retrieveVectorStoreFile(vectorStoreId: String, fileId: String): VectorStoreFile = + sendOrThrow(openAI.retrieveVectorStoreFile(vectorStoreId, fileId)) + + def deleteVectorStoreFile(vectorStoreId: String, fileId: String): DeleteVectorStoreFileResponse = + sendOrThrow(openAI.deleteVectorStoreFile(vectorStoreId, fileId)) + /** Closes and releases resources of http client if was not provided explicitly, otherwise works no-op. */ def close(): Unit = if (closeClient) backend.close() else () diff --git a/core/src/main/scala/sttp/openai/requests/assistants/AssistantsRequestBody.scala b/core/src/main/scala/sttp/openai/requests/assistants/AssistantsRequestBody.scala index 58550973..60dfbd55 100644 --- a/core/src/main/scala/sttp/openai/requests/assistants/AssistantsRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/assistants/AssistantsRequestBody.scala @@ -1,7 +1,7 @@ package sttp.openai.requests.assistants import sttp.openai.json.SnakePickle -import sttp.openai.requests.completions.chat.message.Tool +import sttp.openai.requests.completions.chat.message.{Tool, ToolResources} object AssistantsRequestBody { @@ -20,11 +20,11 @@ object AssistantsRequestBody { * * @param tools * A list of tool enabled on the assistant. There can be a maximum of 128 tools per assistant. Tools can be of types code_interpreter, - * retrieval, or function. + * file_search, or function. * - * @param file_ids - * A list of file IDs attached to this assistant. There can be a maximum of 20 files attached to the assistant. Files are ordered by - * their creation date in ascending order. + * @param toolResources + * A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the + * code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs. * * @param metadata * Set of 16 key-value pairs that can be attached to an object. 
This can be useful for storing additional information about the object @@ -38,25 +38,13 @@ object AssistantsRequestBody { description: Option[String] = None, instructions: Option[String] = None, tools: Seq[Tool] = Seq.empty, - file_ids: Seq[String] = Seq.empty, + toolResources: Option[ToolResources] = None, metadata: Option[Map[String, String]] = None ) object CreateAssistantBody { implicit val createAssistantBodyW: SnakePickle.Writer[CreateAssistantBody] = SnakePickle.macroW[CreateAssistantBody] } - /** @param fileId - * A File ID (with purpose="assistants") that the assistant should use. Useful for tools like retrieval and code_interpreter that can - * access files. - * - * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/createAssistantFile]] - */ - case class CreateAssistantFileBody(fileId: String) - - object CreateAssistantFileBody { - implicit val createAssistantFileBodyW: SnakePickle.Writer[CreateAssistantFileBody] = SnakePickle.macroW[CreateAssistantFileBody] - } - /** @param model * ID of the model to use. You can use the List models API to see all of your available models, or see our Model overview for * descriptions of them. @@ -72,13 +60,11 @@ object AssistantsRequestBody { * * @param tools * A list of tool enabled on the assistant. There can be a maximum of 128 tools per assistant. Tools can be of types code_interpreter, - * retrieval, or function. - * - * @param fileIds - * A list of File IDs attached to this assistant. There can be a maximum of 20 files attached to the assistant. Files are ordered by - * their creation date in ascending order. If a file was previously attached to the list but does not show up in the list, it will be - * deleted from the assistant. + * file_search, or function. * + * @param toolResources + * A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the + * code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs. v * @param metadata * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object * in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters long. @@ -91,7 +77,7 @@ object AssistantsRequestBody { description: Option[String] = None, instructions: Option[String] = None, tools: Seq[Tool] = Seq.empty, - fileIds: Seq[String] = Seq.empty, + toolResources: Option[ToolResources] = None, metadata: Map[String, String] = Map.empty ) diff --git a/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala b/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala index d5e243e0..dce4e485 100644 --- a/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala @@ -1,7 +1,7 @@ package sttp.openai.requests.assistants import sttp.openai.json.SnakePickle -import sttp.openai.requests.completions.chat.message.Tool +import sttp.openai.requests.completions.chat.message.{Tool, ToolResources} object AssistantsResponseData { @@ -30,11 +30,11 @@ object AssistantsResponseData { * * @param tools * A list of tool enabled on the assistant. There can be a maximum of 128 tools per assistant. Tools can be of types code_interpreter, - * retrieval, or function. + * file_search, or function. 
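A minimal sketch of the blocking variants added to `OpenAISyncClient` above — the store name and file id are placeholders, and the file is assumed to have been uploaded beforehand with purpose `assistants`:

```scala
import sttp.openai.OpenAISyncClient
import sttp.openai.requests.vectorstore.VectorStoreRequestBody.CreateVectorStoreBody
import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.CreateVectorStoreFileBody

val client = OpenAISyncClient(System.getProperty("openai-key"))

// create an empty store, then attach a previously uploaded file to it;
// as with the other sync-client methods, failures surface as a thrown OpenAIException
val store     = client.createVectorStore(CreateVectorStoreBody(name = Some("support-docs")))
val storeFile = client.createVectorStoreFile(store.id, CreateVectorStoreFileBody("file-abc123"))
println(s"${storeFile.id} attached to ${store.id}")

client.close()
```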
* - * @param fileIds - * A list of file IDs attached to this assistant. There can be a maximum of 20 files attached to the assistant. Files are ordered by - * their creation date in ascending order. + * @param toolResources + * A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the + * code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs. * * @param metadata * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object @@ -51,7 +51,7 @@ object AssistantsResponseData { model: String, instructions: Option[String], tools: Seq[Tool], - fileIds: Seq[String], + toolResources: Option[ToolResources], metadata: Map[String, String] ) @@ -59,32 +59,6 @@ object AssistantsResponseData { implicit val assistantDataR: SnakePickle.Reader[AssistantData] = SnakePickle.macroR[AssistantData] } - /** @param id - * The identifier, which can be referenced in API endpoints. - * - * @param object - * The object type, which is always assistant.file. - * - * @param createdAt - * The Unix timestamp (in seconds) for when the assistant file was created. - * - * @param assistantId - * The assistant ID that the file is attached to. - * - * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/file-object]] - */ - - case class AssistantFileData( - id: String, - `object`: String, - createdAt: Int, - assistantId: String - ) - - object AssistantFileData { - implicit val assistantFileDataR: SnakePickle.Reader[AssistantFileData] = SnakePickle.macroR[AssistantFileData] - } - /** @param object * Always "list" * @param data @@ -105,27 +79,6 @@ object AssistantsResponseData { implicit val listAssistantsResponseR: SnakePickle.Reader[ListAssistantsResponse] = SnakePickle.macroR[ListAssistantsResponse] } - /** @param object - * Always "list" - * @param data - * A list of assistant objects. 
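To illustrate the v2 shape of `CreateAssistantBody` above, where the old `file_ids` list is replaced by `tool_resources`: a sketch wiring the new file_search tool to an existing vector store — the model name and ids are placeholders:

```scala
import sttp.openai.requests.assistants.AssistantsRequestBody.CreateAssistantBody
import sttp.openai.requests.completions.chat.message.{Tool, ToolResource, ToolResources}

val assistantBody = CreateAssistantBody(
  model = "gpt-4o",
  name = Some("docs-assistant"),
  instructions = Some("Answer using the attached knowledge base."),
  tools = Seq(Tool.FileSearchTool),
  // the ToolResources writer renders this as {"file_search":{"vector_store_ids":["vs_123"]}}
  toolResources = Some(
    ToolResources(fileSearch = Some(ToolResource.FileSearchToolResource(vectorStoreIds = Some(Seq("vs_123")))))
  )
)
```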
- * @param firstId - * @param lastId - * @param hasMore - * } - */ - case class ListAssistantFilesResponse( - `object`: String = "list", - data: Seq[AssistantFileData], - firstId: String, - lastId: String, - hasMore: Boolean - ) - object ListAssistantFilesResponse { - implicit val listAssistantFilesResponseR: SnakePickle.Reader[ListAssistantFilesResponse] = - SnakePickle.macroR[ListAssistantFilesResponse] - } - /** @param id * @param `object` * assistant.deleted @@ -143,22 +96,4 @@ object AssistantsResponseData { implicit val deleteAssistantResponseReadWriter: SnakePickle.ReadWriter[DeleteAssistantResponse] = SnakePickle.macroRW[DeleteAssistantResponse] } - - /** @param id - * @param `object` - * assistant.file.deleted - * @param deleted - * - * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistantFile]] - */ - case class DeleteAssistantFileResponse( - id: String, - `object`: String, - deleted: Boolean - ) - - object DeleteAssistantFileResponse { - implicit val deleteAssistantFileResponseReadWriter: SnakePickle.ReadWriter[DeleteAssistantFileResponse] = - SnakePickle.macroRW[DeleteAssistantFileResponse] - } } diff --git a/core/src/main/scala/sttp/openai/requests/completions/chat/ChatRequestBody.scala b/core/src/main/scala/sttp/openai/requests/completions/chat/ChatRequestBody.scala index 4ad9a82b..1d312127 100644 --- a/core/src/main/scala/sttp/openai/requests/completions/chat/ChatRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/completions/chat/ChatRequestBody.scala @@ -141,6 +141,8 @@ object ChatRequestBody { case object GPT4TurboVision extends ChatCompletionModel("gpt-4-vision-preview") + case object GPT4o extends ChatCompletionModel("gpt-4o") + case class CustomChatCompletionModel(customChatCompletionModel: String) extends ChatCompletionModel(customChatCompletionModel) val values: Set[ChatCompletionModel] = @@ -152,7 +154,8 @@ object ChatRequestBody { GPT35Turbo, GPT35Turbo0301, GPT4Turbo, - GPT4TurboVision + GPT4TurboVision, + GPT4o ) private val byChatModelValue = values.map(model => model.value -> model).toMap diff --git a/core/src/main/scala/sttp/openai/requests/completions/chat/message/Attachment.scala b/core/src/main/scala/sttp/openai/requests/completions/chat/message/Attachment.scala new file mode 100644 index 00000000..4c6b2dbd --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/completions/chat/message/Attachment.scala @@ -0,0 +1,26 @@ +package sttp.openai.requests.completions.chat.message + +import sttp.openai.json.SnakePickle +import ujson.{Obj, Value} + +case class Attachment(fileId: Option[String] = None, tools: Option[Seq[Tool]] = None) + +object Attachment { + implicit val toolResourcesOptRW: SnakePickle.ReadWriter[Attachment] = SnakePickle + .readwriter[Value] + .bimap[Attachment]( + attachment => + (attachment.fileId, attachment.tools) match { + case (Some(fileId), Some(tools)) => Obj("file_id" -> fileId, "tools" -> SnakePickle.writeJs(tools)) + case (Some(fileId), None) => Obj("file_id" -> fileId) + case (None, Some(tools)) => Obj("tools" -> SnakePickle.writeJs(tools)) + case _ => Obj() + }, + json => { + val map = json.obj + val fileId: Option[String] = map.get("file_id").map(_.str) + val tools: Option[Seq[Tool]] = map.get("tools").map(_.arr.map(e => SnakePickle.read[Tool](e)).toList).filter(_.nonEmpty) + Attachment(fileId, tools) + } + ) +} diff --git a/core/src/main/scala/sttp/openai/requests/completions/chat/message/Tool.scala 
b/core/src/main/scala/sttp/openai/requests/completions/chat/message/Tool.scala index 4c5d3043..56ac63b3 100644 --- a/core/src/main/scala/sttp/openai/requests/completions/chat/message/Tool.scala +++ b/core/src/main/scala/sttp/openai/requests/completions/chat/message/Tool.scala @@ -29,11 +29,11 @@ object Tool { */ case object CodeInterpreterTool extends Tool - /** Retrieval tool + /** file_search tool * - * The type of tool being defined: retrieval + * The type of tool being defined: file_search */ - case object RetrievalTool extends Tool + case object FileSearchTool extends Tool implicit val toolRW: SnakePickle.ReadWriter[Tool] = SnakePickle .readwriter[Value] @@ -43,14 +43,14 @@ object Tool { Obj("type" -> "function", "function" -> SnakePickle.writeJs(functionTool)) case CodeInterpreterTool => Obj("type" -> "code_interpreter") - case RetrievalTool => - Obj("type" -> "retrieval") + case FileSearchTool => + Obj("type" -> "file_search") }, json => json("type").str match { case "function" => SnakePickle.read[FunctionTool](json("function")) case "code_interpreter" => CodeInterpreterTool - case "retrieval" => RetrievalTool + case "file_search" => FileSearchTool } ) } diff --git a/core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResource.scala b/core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResource.scala new file mode 100644 index 00000000..ca96f619 --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResource.scala @@ -0,0 +1,63 @@ +package sttp.openai.requests.completions.chat.message + +import sttp.openai.json.SnakePickle +import ujson._ + +sealed trait ToolResource + +object ToolResource { + + /** Code interpreter tool + * + * The type of tool being defined: code_interpreter + */ + case class CodeInterpreterToolResource(filesIds: Option[Seq[String]] = None) extends ToolResource + + implicit val codeInterpreterToolResourceRW: SnakePickle.ReadWriter[CodeInterpreterToolResource] = SnakePickle + .readwriter[Value] + .bimap[CodeInterpreterToolResource]( + resource => + resource.filesIds match { + case Some(fileIds) => Obj("file_ids" -> fileIds) + case None => Obj() + }, + json => { + val map = json.obj + if (map.nonEmpty) { + val fileIds = map.get("file_ids").map(_.arr.map(_.str).toList) + CodeInterpreterToolResource(fileIds) + } else { + null + } + } + ) + + /** file_search tool + * + * The type of tool being defined: file_search + */ + case class FileSearchToolResource(vectorStoreIds: Option[Seq[String]] = None, vectorStores: Option[Seq[String]] = None) + extends ToolResource + + implicit val fileSearchToolResourceRW: SnakePickle.ReadWriter[FileSearchToolResource] = SnakePickle + .readwriter[Value] + .bimap[FileSearchToolResource]( + resource => + (resource.vectorStoreIds, resource.vectorStores) match { + case (Some(vectorStoreIds), Some(vectorStores)) => Obj("vector_store_ids" -> vectorStoreIds, "vector_stores" -> vectorStores) + case (Some(vectorStoreIds), None) => Obj("vector_store_ids" -> vectorStoreIds) + case (None, Some(vectorStores)) => Obj("vector_stores" -> vectorStores) + case _ => Obj() + }, + json => { + val map = json.obj + if (map.nonEmpty) { + val storeIds: Option[List[String]] = map.get("vector_store_ids").map(_.arr.map(_.str).toList).filter(_.nonEmpty) + val stores: Option[List[String]] = map.get("vector_stores").map(_.arr.map(_.str).toList).filter(_.nonEmpty) + FileSearchToolResource(storeIds, stores) + } else { + null + } + } + ) +} diff --git 
a/core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResources.scala b/core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResources.scala new file mode 100644 index 00000000..75099213 --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResources.scala @@ -0,0 +1,40 @@ +package sttp.openai.requests.completions.chat.message + +import sttp.openai.json.SnakePickle +import sttp.openai.requests.completions.chat.message.ToolResource.{CodeInterpreterToolResource, FileSearchToolResource} +import ujson._ + +case class ToolResources( + codeInterpreter: Option[CodeInterpreterToolResource] = None, + fileSearch: Option[FileSearchToolResource] = None +) + +object ToolResources { + + implicit val toolResourcesOptRW: SnakePickle.ReadWriter[Option[ToolResources]] = SnakePickle + .readwriter[Value] + .bimap[Option[ToolResources]]( + { + case Some(resources) => + (resources.fileSearch, resources.codeInterpreter) match { + case (Some(fileSearch), Some(codeInterpreter)) => + Obj("file_search" -> SnakePickle.writeJs(fileSearch), "code_interpreter" -> SnakePickle.writeJs(codeInterpreter)) + case (Some(fileSearch), None) => Obj("file_search" -> SnakePickle.writeJs(fileSearch)) + case (None, Some(codeInterpreter)) => Obj("code_interpreter" -> SnakePickle.writeJs(codeInterpreter)) + case _ => Obj() + } + case None => Obj() + }, + json => { + val map = json.obj + if (map.nonEmpty) { + val codeInterpreter: Option[CodeInterpreterToolResource] = + map.get("code_interpreter").map(e => SnakePickle.read[CodeInterpreterToolResource](e)) + val fileSearch: Option[FileSearchToolResource] = map.get("file_search").map(e => SnakePickle.read[FileSearchToolResource](e)) + Some(ToolResources(codeInterpreter, fileSearch)) + } else { + None + } + } + ) +} diff --git a/core/src/main/scala/sttp/openai/requests/files/FilesResponseData.scala b/core/src/main/scala/sttp/openai/requests/files/FilesResponseData.scala index a90cd241..a9f30da7 100644 --- a/core/src/main/scala/sttp/openai/requests/files/FilesResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/files/FilesResponseData.scala @@ -10,8 +10,8 @@ object FilesResponseData { filename: String, bytes: Int, createdAt: Int, - status: String, - statusDetails: Option[String] + @deprecated("Mark as deprecated in OpenAI spec") status: String, + @deprecated("Mark as deprecated in OpenAI spec") statusDetails: Option[String] ) object FileData { diff --git a/core/src/main/scala/sttp/openai/requests/threads/ThreadsRequestBody.scala b/core/src/main/scala/sttp/openai/requests/threads/ThreadsRequestBody.scala index 386a1670..3e3d58fd 100644 --- a/core/src/main/scala/sttp/openai/requests/threads/ThreadsRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/threads/ThreadsRequestBody.scala @@ -1,6 +1,7 @@ package sttp.openai.requests.threads import sttp.openai.json.SnakePickle +import sttp.openai.requests.completions.chat.message.ToolResources import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage object ThreadsRequestBody { @@ -15,6 +16,7 @@ object ThreadsRequestBody { */ case class CreateThreadBody( messages: Option[Seq[CreateMessage]] = None, + toolResources: Option[ToolResources] = None, metadata: Option[Map[String, String]] = None ) diff --git a/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesRequestBody.scala b/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesRequestBody.scala index d4daa771..54a23518 100644 --- 
a/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesRequestBody.scala @@ -1,6 +1,7 @@ package sttp.openai.requests.threads.messages import sttp.openai.json.SnakePickle +import sttp.openai.requests.completions.chat.message.Attachment object ThreadMessagesRequestBody { @@ -8,9 +9,8 @@ object ThreadMessagesRequestBody { * string Required The role of the entity that is creating the message. Currently only user is supported. * @param content * string Required The content of the message. - * @param file_ids - * array Optional Defaults to [] A list of File IDs that the message should use. There can be a maximum of 10 files attached to a - * message. Useful for tools like retrieval and code_interpreter that can access and use files. + * @param attachments + * A list of files attached to the message, and the tools they were added to. * @param metadata * map Optional Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information * about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters @@ -21,7 +21,7 @@ object ThreadMessagesRequestBody { case class CreateMessage( role: String, content: String, - file_ids: Seq[String] = Seq.empty, + attachments: Option[Seq[Attachment]] = None, metadata: Option[Map[String, String]] = None ) diff --git a/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesResponseData.scala b/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesResponseData.scala index eedb8936..98010acc 100644 --- a/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesResponseData.scala @@ -1,6 +1,7 @@ package sttp.openai.requests.threads.messages import sttp.openai.json.SnakePickle +import sttp.openai.requests.completions.chat.message.Attachment object ThreadMessagesResponseData { @@ -20,9 +21,8 @@ object ThreadMessagesResponseData { * If applicable, the ID of the assistant that authored this message. * @param runId * If applicable, the ID of the run associated with the authoring of this message. - * @param fileIds - * A list of file IDs that the assistant should use. Useful for tools like retrieval and code_interpreter that can access files. A - * maximum of 10 files can be attached to a message. + * @param attachments + * A list of files attached to the message, and the tools they were added to. * @param metadata * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object * in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters long. @@ -38,7 +38,7 @@ object ThreadMessagesResponseData { content: Seq[Content], assistantId: Option[String] = None, runId: Option[String] = None, - fileIds: Seq[String] = Seq.empty, + attachments: Option[Seq[Attachment]] = None, metadata: Map[String, String] = Map.empty ) @@ -66,51 +66,6 @@ object ThreadMessagesResponseData { implicit val listMessagesResponseR: SnakePickle.Reader[ListMessagesResponse] = SnakePickle.macroR[ListMessagesResponse] } - /** @param id - * The identifier, which can be referenced in API endpoints. - * - * @param object - * The object type, which is always thread.message.file. 
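To show the `attachments` field that replaces `file_ids` on messages (both on `CreateMessage` above and on the `MessageData` returned by the API), a sketch with a placeholder file id:

```scala
import sttp.openai.requests.completions.chat.message.{Attachment, Tool}
import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage

val userMessage = CreateMessage(
  role = "user",
  content = "Summarise the attached report",
  // the Attachment writer renders this as {"file_id":"file-abc123","tools":[{"type":"file_search"}]}
  attachments = Some(Seq(Attachment(fileId = Some("file-abc123"), tools = Some(Seq(Tool.FileSearchTool)))))
)
```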
- * - * @param createdAt - * The Unix timestamp (in seconds) for when the message file was created. - * - * @param messageId - * The ID of the message that the File is attached to. - * - * For more information please visit: [[https://platform.openai.com/docs/api-reference/messages/file-object]] - */ - case class MessageFileData( - id: String, - `object`: String, - createdAt: Int, - messageId: String - ) - - object MessageFileData { - implicit val messageFileDataR: SnakePickle.Reader[MessageFileData] = SnakePickle.macroR[MessageFileData] - } - - /** @param object - * Always "list" - * @param data - * A list of message file objects. - * @param firstId - * @param lastId - * @param hasMore - * } - */ - case class ListMessageFilesResponse( - `object`: String = "list", - data: Seq[MessageFileData], - firstId: String, - lastId: String, - hasMore: Boolean - ) - object ListMessageFilesResponse { - implicit val listMessageFilesResponseR: SnakePickle.Reader[ListMessageFilesResponse] = SnakePickle.macroR[ListMessageFilesResponse] - } - sealed trait Annotation /** @param fileId @@ -127,7 +82,7 @@ object ThreadMessagesResponseData { implicit val fileCitationR: SnakePickle.Reader[FileCitation] = SnakePickle.macroR[FileCitation] /** A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. - * Generated when the assistant uses the "retrieval" tool to search files. + * Generated when the assistant uses the "file_search" tool to search files. * @param type * Always file_citation. * diff --git a/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsRequestBody.scala b/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsRequestBody.scala index 20fa81a3..32ce4a13 100644 --- a/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsRequestBody.scala @@ -1,7 +1,7 @@ package sttp.openai.requests.threads.runs import sttp.openai.json.SnakePickle -import sttp.openai.requests.completions.chat.message.Tool +import sttp.openai.requests.completions.chat.message.{Tool, ToolResources} import sttp.openai.requests.threads.ThreadsRequestBody.CreateThreadBody object ThreadRunsRequestBody { @@ -69,6 +69,7 @@ object ThreadRunsRequestBody { model: Option[String] = None, instructions: Option[String] = None, tools: Seq[Tool] = Seq.empty, + toolResources: Option[ToolResources] = None, metadata: Map[String, String] = Map.empty ) diff --git a/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala b/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala index da49a2e4..a3a2bf9b 100644 --- a/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala @@ -2,7 +2,7 @@ package sttp.openai.requests.threads.runs import sttp.openai.json.SnakePickle import sttp.openai.requests.completions.Usage -import sttp.openai.requests.completions.chat.message.Tool +import sttp.openai.requests.completions.chat.message.{Tool, ToolResources} object ThreadRunsResponseData { @@ -57,8 +57,9 @@ object ThreadRunsResponseData { * @param tools * The list of tools that the assistant used for this run. * - * @param fileIds - * The list of File IDs the assistant used for this run. + * @param toolResources + * A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. 
For example, the + * code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs. * * @param metadata * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object @@ -86,7 +87,7 @@ object ThreadRunsResponseData { model: String, instructions: Option[String] = None, tools: Seq[Tool] = Seq.empty, - fileIds: Seq[String] = Seq.empty, + toolResources: Option[ToolResources] = None, metadata: Map[String, String] = Map.empty, usage: Option[Usage] = None ) @@ -319,7 +320,7 @@ object ThreadRunsResponseData { * Always tool_calls. * @param toolCalls * An array of tool calls the run step was involved in. These can be associated with one of three types of tools: code_interpreter, - * retrieval, or function. + * file_search, or function. */ case class ToolCalls(`type`: String, toolCalls: Seq[ToolCall]) extends StepDetails @@ -339,8 +340,8 @@ object ThreadRunsResponseData { json("type").str match { case "code_interpreter" => SnakePickle.read[CodeInterpreterToolCall](json) - case "retrieval" => - SnakePickle.read[RetrievalToolCall](json) + case "file_search" => + SnakePickle.read[FileSearchToolCall](json) case "function" => SnakePickle.read[FunctionToolCall](json) } @@ -367,24 +368,24 @@ object ThreadRunsResponseData { implicit val codeInterpreterToolCallR: SnakePickle.Reader[CodeInterpreterToolCall] = SnakePickle.macroR[CodeInterpreterToolCall] } - /** Retrieval tool call + /** FileSearch tool call * @param id * The ID of the tool call object. * * @param type - * The type of tool call. This is always going to be retrieval for this type of tool call. + * The type of tool call. This is always going to be file_search for this type of tool call. * - * @param retrieval + * @param fileSearch * For now, this is always going to be an empty object. 
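The same `tool_resources` shape is accepted when creating threads and runs (the `toolResources` fields added to `CreateThreadBody` and `CreateRun` earlier in this patch). A sketch giving a new thread's code_interpreter tool access to an uploaded file — the file id is a placeholder:

```scala
import sttp.openai.requests.completions.chat.message.{ToolResource, ToolResources}
import sttp.openai.requests.threads.ThreadsRequestBody.CreateThreadBody

val threadBody = CreateThreadBody(
  toolResources = Some(
    ToolResources(codeInterpreter = Some(ToolResource.CodeInterpreterToolResource(Some(Seq("file-abc123")))))
  )
)
```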
*/ - case class RetrievalToolCall( + case class FileSearchToolCall( id: String, `type`: String, - retrieval: Map[String, String] + fileSearch: Map[String, String] ) extends ToolCall - object RetrievalToolCall { - implicit val retrievalToolCallR: SnakePickle.Reader[RetrievalToolCall] = SnakePickle.macroR[RetrievalToolCall] + object FileSearchToolCall { + implicit val file_searchToolCallR: SnakePickle.Reader[FileSearchToolCall] = SnakePickle.macroR[FileSearchToolCall] } /** Function tool call diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala new file mode 100644 index 00000000..737a352b --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala @@ -0,0 +1,15 @@ +package sttp.openai.requests.vectorstore + +import sttp.openai.json.SnakePickle +import ujson.{Obj, Value} + +case class ExpiresAfter(anchor: String, days: Int) +object ExpiresAfter { + + implicit val expiresAfterRW: SnakePickle.ReadWriter[ExpiresAfter] = SnakePickle + .readwriter[Value] + .bimap[ExpiresAfter]( + ea => Obj("anchor" -> ea.anchor, "days" -> ea.days), + json => ExpiresAfter(json("anchor").str, json("days").num.toInt) + ) +} \ No newline at end of file diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala new file mode 100644 index 00000000..32e8f6cd --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala @@ -0,0 +1,36 @@ +package sttp.openai.requests.vectorstore + +import sttp.openai.json.SnakePickle + +object VectorStoreRequestBody { + + case class CreateVectorStoreBody( + fileIds: Option[Seq[String]] = None, + name: Option[String] = None, + expiresAfter: Option[ExpiresAfter] = None, + metadata: Option[Map[String, String]] = None + ) + + object CreateVectorStoreBody { + implicit val createVectorStoreBodyW: SnakePickle.Writer[CreateVectorStoreBody] = SnakePickle.macroW[CreateVectorStoreBody] + } + + case class RetrieveVectorStoreBody( + vectorStoreId: String + ) + + object RetrieveVectorStoreBody { + implicit val retrieveVectorStoreBody: SnakePickle.Writer[RetrieveVectorStoreBody] = SnakePickle.macroW[RetrieveVectorStoreBody] + } + + case class ModifyVectorStoreBody( + name: Option[String] = None, + expiresAfter: Option[ExpiresAfter] = None, + metadata: Option[Map[String, String]] = None + ) + + object ModifyVectorStoreBody { + implicit val modifyVectorStoreBody: SnakePickle.Writer[ModifyVectorStoreBody] = SnakePickle.macroW[ModifyVectorStoreBody] + } + +} diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala new file mode 100644 index 00000000..aac1373c --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala @@ -0,0 +1,111 @@ +package sttp.openai.requests.vectorstore + +import sttp.openai.json.SnakePickle +import ujson.Value + +object VectorStoreResponseData { + + /** Represents a vector store object. + * + * @param id + * The identifier, which can be referenced in API endpoints. + * @param `object` + * The object type, which is always vector_store. + * @param createdAt + * The Unix timestamp (in seconds) for when the vector store was created. + * @param name + * The name of the vector store. 
+ * @param usageBytes + * The total number of bytes used by the files in the vector store. + * @param fileCounts + * Object containing file count properties. + * @param status + * The status of the vector store. + * @param expiresAfter + * The expiration policy for a vector store. + * @param metadata + * Set of key-value pairs that can be attached to an object. + */ + case class VectorStore( + id: String, + `object`: String = "vector_store", + createdAt: Int, + name: String, + usageBytes: Int, + fileCounts: FileCounts, + status: StoreStatus, + expiresAfter: Option[ExpiresAfter] = None, + expiresAt: Option[Int] = None, + lastActiveAt: Option[Int] = None, + lastUsedAt: Option[Int] = None, + metadata: Map[String, String] = Map.empty + ) + + object VectorStore { + implicit val vectorStoreR: SnakePickle.Reader[VectorStore] = SnakePickle.macroR[VectorStore] + } + + /** Describes number of files in different statuses. + * + * @param inProgress + * The number of files currently in progress. + * @param completed + * The number of files that have been completed successfully. + * @param failed + * The number of files that have failed. + * @param cancelled + * The number of files that have been cancelled. + * @param total + * The total number of files. + */ + case class FileCounts( + inProgress: Int, + completed: Int, + failed: Int, + cancelled: Int, + total: Int + ) + + object FileCounts { + implicit val fileCountsR: SnakePickle.Reader[FileCounts] = SnakePickle.macroR[FileCounts] + } + + sealed trait StoreStatus + case object InProgress extends StoreStatus + case object Completed extends StoreStatus + case object Expired extends StoreStatus + + object StoreStatus { + implicit val storeStatusRW: SnakePickle.Reader[StoreStatus] = SnakePickle + .reader[Value] + .map(json => + json.str match { + case "in_progress" => InProgress + case "completed" => Completed + case "expired" => Expired + } + ) + } + + case class ListVectorStoresResponse( + `object`: String = "list", + data: Seq[VectorStore], + firstId: String, + lastId: String, + hasMore: Boolean + ) + + object ListVectorStoresResponse { + implicit val listVectorStoresResponseR: SnakePickle.Reader[ListVectorStoresResponse] = SnakePickle.macroR[ListVectorStoresResponse] + } + + case class DeleteVectorStoreResponse( + id: String, + `object`: String, + deleted: Boolean + ) + + object DeleteVectorStoreResponse { + implicit val deleteVectorStoreResponseR: SnakePickle.Reader[DeleteVectorStoreResponse] = SnakePickle.macroR[DeleteVectorStoreResponse] + } +} diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala new file mode 100644 index 00000000..36a593e7 --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala @@ -0,0 +1,31 @@ +package sttp.openai.requests.vectorstore.file + +import sttp.openai.json.SnakePickle +import ujson.Value + +sealed trait FileStatus +case object InProgress extends FileStatus +case object Completed extends FileStatus +case object Failed extends FileStatus +case object Cancelled extends FileStatus + +object FileStatus { + implicit val expiresAfterRW: SnakePickle.ReadWriter[FileStatus] = SnakePickle + .readwriter[Value] + .bimap[FileStatus]( + { + case InProgress => Value("in_progress") + case Completed => Value("completed") + case Failed => Value("failed") + case Cancelled => Value("cancelled") + }, + json => { + json.str match { + case "in_progress" => InProgress + case "completed" => 
Completed + case "failed" => Failed + case "cancelled" => Cancelled + } + } + ) +} diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala new file mode 100644 index 00000000..90a52ec2 --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala @@ -0,0 +1,34 @@ +package sttp.openai.requests.vectorstore.file + +import sttp.openai.json.SnakePickle + +object VectorStoreFileRequestBody { + + case class CreateVectorStoreFileBody( + fileId: String + ) + + object CreateVectorStoreFileBody { + implicit val createVectorStoreFileBodyR: SnakePickle.Writer[CreateVectorStoreFileBody] = SnakePickle.macroW[CreateVectorStoreFileBody] + } + + case class ListVectorStoreFilesBody( + limit: Int = 20, + order: String = "desc", + after: Option[String] = None, + before: Option[String] = None, + filter: Option[FileStatus] = None + ) { + def toMap: Map[String, String] = { + val map = Map("limit" -> limit.toString, "order" -> order) + map ++ + after.map("after" -> _) ++ + before.map("before" -> _) ++ + filter.map("filter" -> _.toString) + } + } + + object ListVectorStoreFilesBody { + implicit val listVectorStoreFilesBodyR: SnakePickle.Writer[ListVectorStoreFilesBody] = SnakePickle.macroW[ListVectorStoreFilesBody] + } +} diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala new file mode 100644 index 00000000..a04d01bc --- /dev/null +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala @@ -0,0 +1,80 @@ +package sttp.openai.requests.vectorstore.file + +import sttp.openai.json.SnakePickle +import ujson.Value + +object VectorStoreFileResponseData { + + /** Represents a vector store object. + * + * @param id + * The identifier, which can be referenced in API endpoints. + * @param `object` + * The object type, which is always vector_store. + * @param createdAt + * The Unix timestamp (in seconds) for when the vector store was created. + * @param usageBytes + * The total number of bytes used by the files in the vector store. 
+ */ + case class VectorStoreFile( + id: String, + `object`: String = "vector_store.file", + usageBytes: Int, + createdAt: Int, + vectorStoreId: String, + status: FileStatus, + lastError: Option[LastError] = None + ) + + object VectorStoreFile { + implicit val vectorStoreFileR: SnakePickle.Reader[VectorStoreFile] = SnakePickle.macroR[VectorStoreFile] + } + + case class LastError(code: ErrorCode, message: String) + + object LastError { + implicit val lastErrorR: SnakePickle.Reader[LastError] = SnakePickle.macroR[LastError] + } + + sealed trait ErrorCode + + case object ServerError extends ErrorCode + + case object RateLimitExceeded extends ErrorCode + + object ErrorCode { + implicit val errorCodeR: SnakePickle.Reader[ErrorCode] = SnakePickle + .reader[Value] + .map(json => + json.str match { + case "server_error" => ServerError + case "rate_limit_exceeded" => RateLimitExceeded + } + ) + } + + case class ListVectorStoreFilesResponse( + `object`: String = "list", + data: Seq[VectorStoreFile], + firstId: String, + lastId: String, + hasMore: Boolean + ) + + object ListVectorStoreFilesResponse { + implicit val listVectorStoreFilesResponseR: SnakePickle.Reader[ListVectorStoreFilesResponse] = + SnakePickle.macroR[ListVectorStoreFilesResponse] + } + + case class DeleteVectorStoreFileResponse( + id: String, + `object`: String, + deleted: Boolean + ) + + object DeleteVectorStoreFileResponse { + implicit val deleteVectorStoreFileResponseR: SnakePickle.Reader[DeleteVectorStoreFileResponse] = + SnakePickle.macroR[DeleteVectorStoreFileResponse] + } + +} diff --git a/core/src/test/scala/sttp/openai/fixtures/AssistantsFixture.scala b/core/src/test/scala/sttp/openai/fixtures/AssistantsFixture.scala index bacfca11..cbfc97b3 100644 --- a/core/src/test/scala/sttp/openai/fixtures/AssistantsFixture.scala +++ b/core/src/test/scala/sttp/openai/fixtures/AssistantsFixture.scala @@ -25,28 +25,11 @@ object AssistantsFixture { | "type": "code_interpreter" | } | ], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {} |} |""".stripMargin - val jsonCreateAssistantFileRequest: String = - """ - |{ - | "file_id": "file-abc123" - |} - |""".stripMargin - - val jsonCreateAssistantFileResponse: String = - """ - |{ - | "id": "file-abc123", - | "object": "assistant.file", - | "created_at": 1699055364, - | "assistant_id": "asst_abc123" - |} - |""".stripMargin - val jsonListAssistantsResponse: String = """ |{ @@ -61,7 +44,7 @@ object AssistantsFixture { | "model": "gpt-4", | "instructions": "You are a helpful assistant designed to make me better at coding!", | "tools": [], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {} | }, | { @@ -73,7 +56,7 @@ object AssistantsFixture { | "model": "gpt-4", | "instructions": "You are a helpful assistant designed to make me better at coding!", | "tools": [], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {} | }, | { @@ -85,7 +68,7 @@ object AssistantsFixture { | "model": "gpt-4", | "instructions": null, | "tools": [], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {} | } | ], @@ -107,33 +90,30 @@ object AssistantsFixture { | "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies.", | "tools": [ | { - | "type": "retrieval" + | "type": "file_search" | } | ], - | "file_ids": [ - | "file-abc123" - | ], + | "tool_resources": { + | "file_search": { + | "vector_store_ids": ["vs_1"] + | } + | + | }, | "metadata": {} |} |""".stripMargin - val jsonRetrieveAssistantFileResponse: String = - """ - |{ 
- | "id": "file-abc123", - | "object": "assistant.file", - | "created_at": 1699055364, - | "assistant_id": "asst_abc123" - |} - |""".stripMargin - val jsonModifyAssistantRequest: String = """ |{ | "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files.", - | "tools": [{"type": "retrieval"}], + | "tools": [{"type": "file_search"}], | "model": "gpt-4", - | "file_ids": ["file-abc123", "file-abc456"] + | "tool_resources": { + | "file_search": { + | "vector_store_ids": ["vs_1", "vs_3"] + | } + | } |} |""".stripMargin @@ -149,13 +129,14 @@ object AssistantsFixture { | "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files.", | "tools": [ | { - | "type": "retrieval" + | "type": "file_search" | } | ], - | "file_ids": [ - | "file-abc123", - | "file-abc456" - | ], + | "tool_resources": { + | "file_search": { + | "vector_store_ids": ["vs_1", "vs_2"] + | } + | }, | "metadata": {} |} |""".stripMargin @@ -168,13 +149,4 @@ object AssistantsFixture { | "deleted": true |} |""".stripMargin - - val jsonDeleteAssistantFileResponse: String = - """ - |{ - | "id": "file-abc123", - | "object": "assistant.file.deleted", - | "deleted": true - |} - |""".stripMargin } diff --git a/core/src/test/scala/sttp/openai/fixtures/ThreadMessagesFixture.scala b/core/src/test/scala/sttp/openai/fixtures/ThreadMessagesFixture.scala index 397f586b..f3189076 100644 --- a/core/src/test/scala/sttp/openai/fixtures/ThreadMessagesFixture.scala +++ b/core/src/test/scala/sttp/openai/fixtures/ThreadMessagesFixture.scala @@ -25,7 +25,7 @@ object ThreadMessagesFixture { | } | } | ], - | "file_ids": [], + | "attachments": [], | "assistant_id": null, | "run_id": null, | "metadata": {} @@ -52,7 +52,7 @@ object ThreadMessagesFixture { | } | } | ], - | "file_ids": [], + | "attachments": [], | "assistant_id": null, | "run_id": null, | "metadata": {} @@ -72,9 +72,12 @@ object ThreadMessagesFixture { | } | } | ], - | "file_ids": [ - | "file-abc123" - | ], + | "attachments": [ + | { + | "file_id" : "file-abc123", + | "tools": [] + | } + | ], | "assistant_id": null, | "run_id": null, | "metadata": {} @@ -86,30 +89,6 @@ object ThreadMessagesFixture { |} |""".stripMargin - val jsonListMessageFilesResponse: String = - """ - |{ - | "object": "list", - | "data": [ - | { - | "id": "file-abc123", - | "object": "thread.message.file", - | "created_at": 1699061776, - | "message_id": "msg_abc123" - | }, - | { - | "id": "file-abc123", - | "object": "thread.message.file", - | "created_at": 1699061776, - | "message_id": "msg_abc123" - | } - | ], - | "first_id": "file-abc123", - | "last_id": "file-abc123", - | "has_more": false - |} - |""".stripMargin - val jsonRetrieveMessageResponse: String = """ |{ @@ -127,23 +106,12 @@ object ThreadMessagesFixture { | } | } | ], - | "file_ids": [], | "assistant_id": null, | "run_id": null, | "metadata": {} |} |""".stripMargin - val jsonRetrieveMessageFileResponse: String = - """ - |{ - | "id": "file-abc123", - | "object": "thread.message.file", - | "created_at": 1699061776, - | "message_id": "msg_abc123" - |} - |""".stripMargin - val jsonModifyMessageResponse: String = """ |{ @@ -161,7 +129,7 @@ object ThreadMessagesFixture { | } | } | ], - | "file_ids": [], + | "attachments": [], | "assistant_id": null, | "run_id": null, | "metadata": { diff --git 
a/core/src/test/scala/sttp/openai/fixtures/ThreadRunsFixture.scala b/core/src/test/scala/sttp/openai/fixtures/ThreadRunsFixture.scala index 20293d5a..29aa7b93 100644 --- a/core/src/test/scala/sttp/openai/fixtures/ThreadRunsFixture.scala +++ b/core/src/test/scala/sttp/openai/fixtures/ThreadRunsFixture.scala @@ -31,10 +31,11 @@ object ThreadRunsFixture { | "type": "code_interpreter" | } | ], - | "file_ids": [ - | "file-abc123", - | "file-abc456" - | ], + | "tool_resources": { + | "code_interpreter": { + | "file_ids": ["file-abc123", "file-abc456"] + | } + | }, | "metadata": {}, | "usage": null |} @@ -70,7 +71,7 @@ object ThreadRunsFixture { | "model": "gpt-4", | "instructions": "You are a helpful assistant.", | "tools": [], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {}, | "usage": null |} @@ -101,10 +102,11 @@ object ThreadRunsFixture { | "type": "code_interpreter" | } | ], - | "file_ids": [ - | "file-abc123", - | "file-abc456" - | ], + | "tool_resources": { + | "code_interpreter": { + | "file_ids": ["file-abc123", "file-abc456"] + | } + | }, | "metadata": {}, | "usage": { | "prompt_tokens": 123, @@ -132,10 +134,11 @@ object ThreadRunsFixture { | "type": "code_interpreter" | } | ], - | "file_ids": [ - | "file-abc123", - | "file-abc456" - | ], + | "tool_resources": { + | "code_interpreter": { + | "file_ids": ["file-abc123", "file-abc456"] + | } + | }, | "metadata": {}, | "usage": { | "prompt_tokens": 123, @@ -210,10 +213,11 @@ object ThreadRunsFixture { | "type": "code_interpreter" | } | ], - | "file_ids": [ - | "file-abc123", - | "file-abc456" - | ], + | "tool_resources": { + | "code_interpreter": { + | "file_ids": ["file-abc123", "file-abc456"] + | } + | }, | "metadata": {}, | "usage": { | "prompt_tokens": 123, @@ -284,10 +288,11 @@ object ThreadRunsFixture { | "type": "code_interpreter" | } | ], - | "file_ids": [ - | "file-abc123", - | "file-abc456" - | ], + | "tool_resources": { + | "code_interpreter": { + | "file_ids": ["file-abc123", "file-abc456"] + | } + | }, | "metadata": { | "user_id": "user_abc123" | }, @@ -356,7 +361,7 @@ object ThreadRunsFixture { | } | } | ], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {}, | "usage": null |} @@ -381,10 +386,10 @@ object ThreadRunsFixture { | "instructions": "You summarize books.", | "tools": [ | { - | "type": "retrieval" + | "type": "file_search" | } | ], - | "file_ids": [], + | "tool_resources": {}, | "metadata": {}, | "usage": null |} diff --git a/core/src/test/scala/sttp/openai/fixtures/ThreadsFixture.scala b/core/src/test/scala/sttp/openai/fixtures/ThreadsFixture.scala index b4645c58..733b1ce0 100644 --- a/core/src/test/scala/sttp/openai/fixtures/ThreadsFixture.scala +++ b/core/src/test/scala/sttp/openai/fixtures/ThreadsFixture.scala @@ -4,11 +4,29 @@ object ThreadsFixture { val jsonCreateEmptyThreadRequest: String = """{} |""".stripMargin + val jsonCreateThreadWithMessagesRequestNoAttachments: String = """{ + | "messages": [{ + | "role": "user", + | "content": "Hello, what is AI?" + | }, { + | "role": "user", + | "content": "How does AI work? Explain it in simple terms." + | }] + | }""".stripMargin + val jsonCreateThreadWithMessagesRequest: String = """{ | "messages": [{ | "role": "user", | "content": "Hello, what is AI?", - | "file_ids": ["file-abc123"] + | "attachments": [ + | { + | "file_id" : "file-abc123", + | "tools": [ + | { "type": "code_interpreter" }, + | { "type": "file_search" } + | ] + | } + | ] | }, { | "role": "user", | "content": "How does AI work? Explain it in simple terms." 
@@ -19,7 +37,14 @@ object ThreadsFixture { | "messages": [{ | "role": "user", | "content": "Hello, what is AI?", - | "file_ids": ["file-abc123"] + | "attachments": [ + | { + | "file_id" : "file-abc456", + | "tools": [ + | { "type": "code_interpreter" } + | ] + | } + | ] | }, { | "role": "user", | "content": "How does AI work? Explain it in simple terms." diff --git a/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala new file mode 100644 index 00000000..ba1ca0fd --- /dev/null +++ b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala @@ -0,0 +1,34 @@ +package sttp.openai.fixtures + +object VectorStoreFileFixture { + + val jsonCreateRequest: String = + """{ + | "file_id": "file_1" + |}""".stripMargin + + val jsonObject: String = + """{ + | "id": "vsf_1", + | "object": "vector_store.file", + | "usage_bytes": 123456, + | "created_at": 1698107661, + | "vector_store_id": "vs_1", + | "status": "completed", + | "last_error": null + |}""".stripMargin + + val jsonObjectWithLastError: String = + """{ + | "id": "vsf_1", + | "object": "vector_store.file", + | "usage_bytes": 123456, + | "created_at": 1698107661, + | "vector_store_id": "vs_1", + | "status": "completed", + | "last_error": { + | "code": "server_error", + | "message": "Failed" + | } + |}""".stripMargin +} diff --git a/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala new file mode 100644 index 00000000..9c5a3113 --- /dev/null +++ b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala @@ -0,0 +1,41 @@ +package sttp.openai.fixtures + +object VectorStoreFixture { + + val jsonCreateRequest: String = + """{ + | "file_ids": ["file_1", "file_2"], + | "name": "vs_1" + |}""".stripMargin + + val jsonCreateWithExpiresRequest: String = + """{ + | "file_ids": ["file_1", "file_2"], + | "name": "vs_1", + | "expires_after": { + | "anchor": "11111", + | "days": 2 + | } + |}""".stripMargin + + val jsonObject: String = + """{ + | "id": "vs_1", + | "object": "vector_store", + | "created_at": 1698107661, + | "usage_bytes": 123456, + | "last_active_at": 1698107661, + | "name": "test_vs", + | "status": "in_progress", + | "expires_at": 1698107651, + | "file_counts": { + | "in_progress": 0, + | "completed": 1, + | "cancelled": 2, + | "failed": 1, + | "total": 4 + | }, + | "metadata": {}, + | "last_used_at": 1698107681 + |}""".stripMargin +} diff --git a/core/src/test/scala/sttp/openai/requests/assistants/AssistantsDataSpec.scala b/core/src/test/scala/sttp/openai/requests/assistants/AssistantsDataSpec.scala index fe6fc879..07a253e0 100644 --- a/core/src/test/scala/sttp/openai/requests/assistants/AssistantsDataSpec.scala +++ b/core/src/test/scala/sttp/openai/requests/assistants/AssistantsDataSpec.scala @@ -8,7 +8,9 @@ import sttp.openai.fixtures import sttp.openai.json.SnakePickle import sttp.openai.json.SttpUpickleApiExtension import sttp.openai.requests.completions.chat.message.Tool.CodeInterpreterTool -import sttp.openai.requests.completions.chat.message.Tool.RetrievalTool +import sttp.openai.requests.completions.chat.message.Tool.FileSearchTool +import sttp.openai.requests.completions.chat.message.ToolResource.FileSearchToolResource +import sttp.openai.requests.completions.chat.message.ToolResources class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { @@ -47,7 +49,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with 
EitherValues { tools = Seq( CodeInterpreterTool ), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ) @@ -55,42 +57,8 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { val givenResponse: Either[Exception, AssistantData] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) // then - givenResponse.value shouldBe expectedResponse - } - - "Given create assistant file request" should "be properly serialized to Json" in { - // given - val givenRequest = AssistantsRequestBody.CreateAssistantFileBody( - fileId = "file-abc123" - ) - - val jsonRequest: ujson.Value = ujson.read(fixtures.AssistantsFixture.jsonCreateAssistantFileRequest) - - // when - val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) - - // then - serializedJson shouldBe jsonRequest - } - - "Given create assistant file response as Json" should "be properly deserialized to case class" in { - import sttp.openai.requests.assistants.AssistantsResponseData.AssistantFileData._ - import sttp.openai.requests.assistants.AssistantsResponseData._ - - // given - val jsonResponse = fixtures.AssistantsFixture.jsonCreateAssistantFileResponse - val expectedResponse: AssistantFileData = AssistantFileData( - id = "file-abc123", - `object` = "assistant.file", - createdAt = 1699055364, - assistantId = "asst_abc123" - ) - - // when - val givenResponse: Either[Exception, AssistantFileData] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) - - // then - givenResponse.value shouldBe expectedResponse + val json = givenResponse.value + json shouldBe expectedResponse } "Given list assistants response as Json" should "be properly deserialized to case class" in { @@ -111,7 +79,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = Some("You are a helpful assistant designed to make me better at coding!"), tools = Seq(), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ), AssistantData( @@ -123,7 +91,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = Some("You are a helpful assistant designed to make me better at coding!"), tools = Seq(), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ), AssistantData( @@ -135,7 +103,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = None, tools = Seq(), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ) ), @@ -169,7 +137,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = Some("You are a helpful assistant designed to make me better at coding!"), tools = Seq(), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ), AssistantData( @@ -181,7 +149,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = Some("You are a helpful assistant designed to make me better at coding!"), tools = Seq(), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ), AssistantData( @@ -193,7 +161,7 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = None, tools = Seq(), - fileIds = Seq(), + toolResources = None, metadata = Map.empty ) ), @@ -224,9 +192,9 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = Some("You are an HR bot, and you have access to files to 
answer employee questions about company policies."), tools = Seq( - RetrievalTool + FileSearchTool ), - fileIds = Seq("file-abc123"), + toolResources = Some(ToolResources(None, Some(FileSearchToolResource(Some(Seq("vs_1")))))), metadata = Map.empty ) @@ -237,35 +205,15 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { givenResponse.value shouldBe expectedResponse } - "Given retrieve assistant file response as Json" should "be properly deserialized to case class" in { - import sttp.openai.requests.assistants.AssistantsResponseData.AssistantFileData._ - import sttp.openai.requests.assistants.AssistantsResponseData._ - - // given - val jsonResponse = fixtures.AssistantsFixture.jsonRetrieveAssistantFileResponse - val expectedResponse: AssistantFileData = AssistantFileData( - id = "file-abc123", - `object` = "assistant.file", - createdAt = 1699055364, - assistantId = "asst_abc123" - ) - - // when - val givenResponse: Either[Exception, AssistantFileData] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) - - // then - givenResponse.value shouldBe expectedResponse - } - "Given modify assistant request" should "be properly serialized to Json" in { // given val givenRequest = AssistantsRequestBody.ModifyAssistantBody( instructions = Some( "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files." ), - tools = Seq(RetrievalTool), + tools = Seq(FileSearchTool), model = Some("gpt-4"), - fileIds = Seq("file-abc123", "file-abc456") + toolResources = Some(ToolResources(None, Some(FileSearchToolResource(Some(Seq("vs_1", "vs_3")))))) ) val jsonRequest: ujson.Value = ujson.read(fixtures.AssistantsFixture.jsonModifyAssistantRequest) @@ -293,10 +241,8 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { instructions = Some( "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files." 
), - tools = Seq( - RetrievalTool - ), - fileIds = Seq("file-abc123", "file-abc456"), + tools = Seq(FileSearchTool), + toolResources = Some(ToolResources(None, Some(FileSearchToolResource(Some(Seq("vs_1", "vs_2")))))), metadata = Map.empty ) @@ -325,23 +271,4 @@ class AssistantsDataSpec extends AnyFlatSpec with Matchers with EitherValues { // then givenResponse.value shouldBe expectedResponse } - - "Given delete assistant file response as Json" should "be properly deserialized to case class" in { - import sttp.openai.requests.assistants.AssistantsResponseData.DeleteAssistantFileResponse._ - import sttp.openai.requests.assistants.AssistantsResponseData._ - - // given - val jsonResponse = fixtures.AssistantsFixture.jsonDeleteAssistantFileResponse - val expectedResponse: DeleteAssistantFileResponse = DeleteAssistantFileResponse( - id = "file-abc123", - `object` = "assistant.file.deleted", - deleted = true - ) - - // when - val givenResponse: Either[Exception, DeleteAssistantFileResponse] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) - - // then - givenResponse.value shouldBe expectedResponse - } } diff --git a/core/src/test/scala/sttp/openai/requests/threads/ThreadsDataSpec.scala b/core/src/test/scala/sttp/openai/requests/threads/ThreadsDataSpec.scala index bada91c1..0c6d51de 100644 --- a/core/src/test/scala/sttp/openai/requests/threads/ThreadsDataSpec.scala +++ b/core/src/test/scala/sttp/openai/requests/threads/ThreadsDataSpec.scala @@ -7,6 +7,8 @@ import sttp.client4.IsOption._ import sttp.openai.fixtures import sttp.openai.json.SnakePickle import sttp.openai.json.SttpUpickleApiExtension +import sttp.openai.requests.completions.chat.message.Attachment +import sttp.openai.requests.completions.chat.message.Tool.{CodeInterpreterTool, FileSearchTool} import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage class ThreadsDataSpec extends AnyFlatSpec with Matchers with EitherValues { @@ -24,6 +26,33 @@ class ThreadsDataSpec extends AnyFlatSpec with Matchers with EitherValues { serializedJson shouldBe jsonRequest } + "Given create threads request with messages and no attachments" should "be properly serialized to Json" in { + + // given + val givenRequest = ThreadsRequestBody.CreateThreadBody( + messages = Some( + Seq( + CreateMessage( + role = "user", + content = "Hello, what is AI?" + ), + CreateMessage( + role = "user", + content = "How does AI work? Explain it in simple terms." 
+ ) + ) + ) + ) + + val jsonRequest: ujson.Value = ujson.read(fixtures.ThreadsFixture.jsonCreateThreadWithMessagesRequestNoAttachments) + + // when + val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) + + // then + serializedJson shouldBe jsonRequest + } + "Given create threads request with messages" should "be properly serialized to Json" in { // given @@ -33,7 +62,7 @@ class ThreadsDataSpec extends AnyFlatSpec with Matchers with EitherValues { CreateMessage( role = "user", content = "Hello, what is AI?", - file_ids = Seq("file-abc123") + attachments = Some(Seq(Attachment(Some("file-abc123"), Some(Seq(CodeInterpreterTool, FileSearchTool))))) ), CreateMessage( role = "user", @@ -61,7 +90,7 @@ class ThreadsDataSpec extends AnyFlatSpec with Matchers with EitherValues { CreateMessage( role = "user", content = "Hello, what is AI?", - file_ids = Seq("file-abc123") + attachments = Some(Seq(Attachment(Some("file-abc456"), Some(Seq(CodeInterpreterTool))))) ), CreateMessage( role = "user", diff --git a/core/src/test/scala/sttp/openai/requests/threads/messages/ThreadMessagesDataSpec.scala b/core/src/test/scala/sttp/openai/requests/threads/messages/ThreadMessagesDataSpec.scala index 4595e95c..883c2e0a 100644 --- a/core/src/test/scala/sttp/openai/requests/threads/messages/ThreadMessagesDataSpec.scala +++ b/core/src/test/scala/sttp/openai/requests/threads/messages/ThreadMessagesDataSpec.scala @@ -5,13 +5,9 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import sttp.openai.fixtures import sttp.openai.json.{SnakePickle, SttpUpickleApiExtension} +import sttp.openai.requests.completions.chat.message.Attachment import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.Content.{TextContent, TextContentValue} -import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ - ListMessageFilesResponse, - ListMessagesResponse, - MessageData, - MessageFileData -} +import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ListMessagesResponse, MessageData} class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues { @@ -52,7 +48,7 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues ) ) ), - fileIds = Seq.empty, + attachments = Some(Seq.empty), assistantId = None, runId = None, metadata = Map.empty @@ -87,7 +83,7 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues ) ) ), - fileIds = Seq.empty, + attachments = Some(Seq.empty), assistantId = None, runId = None, metadata = Map.empty @@ -107,9 +103,7 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues ) ) ), - fileIds = Seq( - "file-abc123" - ), + attachments = Some(Seq(Attachment(Some("file-abc123")))), assistantId = None, runId = None, metadata = Map.empty @@ -127,38 +121,6 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues givenResponse.value shouldBe expectedResponse } - "Given list message files response as Json" should "be properly deserialized to case class" in { - import ListMessageFilesResponse._ - // given - val jsonResponse = fixtures.ThreadMessagesFixture.jsonListMessageFilesResponse - val expectedResponse: ListMessageFilesResponse = ListMessageFilesResponse( - `object` = "list", - data = Seq( - MessageFileData( - id = "file-abc123", - `object` = "thread.message.file", - createdAt = 1699061776, - messageId = "msg_abc123" - ), - MessageFileData( - id = "file-abc123", - `object` = "thread.message.file", - 
createdAt = 1699061776, - messageId = "msg_abc123" - ) - ), - firstId = "file-abc123", - lastId = "file-abc123", - hasMore = false - ) - - // when - val givenResponse: Either[Exception, ListMessageFilesResponse] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) - - // then - givenResponse.value shouldBe expectedResponse - } - "Given retrieve message response as Json" should "be properly deserialized to case class" in { import MessageData._ // given @@ -178,7 +140,7 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues ) ) ), - fileIds = Seq.empty, + attachments = None, assistantId = None, runId = None, metadata = Map.empty @@ -191,24 +153,6 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues givenResponse.value shouldBe expectedResponse } - "Given retrieve message file response as Json" should "be properly deserialized to case class" in { - import MessageFileData._ - // given - val jsonResponse = fixtures.ThreadMessagesFixture.jsonRetrieveMessageFileResponse - val expectedResponse: MessageFileData = MessageFileData( - id = "file-abc123", - `object` = "thread.message.file", - createdAt = 1699061776, - messageId = "msg_abc123" - ) - - // when - val givenResponse: Either[Exception, MessageFileData] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) - - // then - givenResponse.value shouldBe expectedResponse - } - "Given modify message response as Json" should "be properly deserialized to case class" in { import MessageData._ // given @@ -228,7 +172,7 @@ class ThreadMessagesDataSpec extends AnyFlatSpec with Matchers with EitherValues ) ) ), - fileIds = Seq.empty, + attachments = Some(Seq.empty), assistantId = None, runId = None, metadata = Map( diff --git a/core/src/test/scala/sttp/openai/requests/threads/runs/ThreadRunsDataSpec.scala b/core/src/test/scala/sttp/openai/requests/threads/runs/ThreadRunsDataSpec.scala index 0688d88f..67914f93 100644 --- a/core/src/test/scala/sttp/openai/requests/threads/runs/ThreadRunsDataSpec.scala +++ b/core/src/test/scala/sttp/openai/requests/threads/runs/ThreadRunsDataSpec.scala @@ -5,8 +5,10 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import sttp.openai.fixtures import sttp.openai.json.{SnakePickle, SttpUpickleApiExtension} -import sttp.openai.requests.completions.chat.message.Tool.{CodeInterpreterTool, FunctionTool, RetrievalTool} +import sttp.openai.requests.completions.chat.message.Tool.{CodeInterpreterTool, FileSearchTool, FunctionTool} import sttp.openai.requests.completions.Usage +import sttp.openai.requests.completions.chat.message.ToolResource.CodeInterpreterToolResource +import sttp.openai.requests.completions.chat.message.ToolResources import sttp.openai.requests.threads.ThreadsRequestBody.CreateThreadBody import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage import sttp.openai.requests.threads.runs.ThreadRunsRequestBody.ToolOutput @@ -52,10 +54,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = None, tools = Seq(CodeInterpreterTool), - fileIds = Seq( - "file-abc123", - "file-abc456" - ), + toolResources = Some(ToolResources(Some(CodeInterpreterToolResource(Some(Seq("file-abc123", "file-abc456")))))), metadata = Map.empty, usage = None ) @@ -107,7 +106,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-4", instructions = Some("You are a helpful assistant."), tools = 
Seq.empty, - fileIds = Seq.empty, + toolResources = None, metadata = Map.empty, usage = None ) @@ -145,7 +144,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-3.5-turbo", instructions = None, tools = Seq(CodeInterpreterTool), - fileIds = Seq("file-abc123", "file-abc456"), + toolResources = Some(ToolResources(Some(CodeInterpreterToolResource(Some(Seq("file-abc123", "file-abc456")))))), metadata = Map.empty, usage = Some(Usage(promptTokens = 123, completionTokens = 456, totalTokens = 579)) ), @@ -165,7 +164,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-3.5-turbo", instructions = None, tools = Seq(CodeInterpreterTool), - fileIds = Seq("file-abc123", "file-abc456"), + toolResources = Some(ToolResources(Some(CodeInterpreterToolResource(Some(Seq("file-abc123", "file-abc456")))))), metadata = Map.empty, usage = Some( Usage( @@ -258,7 +257,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-3.5-turbo", instructions = None, tools = Seq(CodeInterpreterTool), - fileIds = Seq("file-abc123", "file-abc456"), + toolResources = Some(ToolResources(Some(CodeInterpreterToolResource(Some(Seq("file-abc123", "file-abc456")))))), metadata = Map.empty, usage = Some(Usage(promptTokens = 123, completionTokens = 456, totalTokens = 579)) ) @@ -340,7 +339,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { model = "gpt-3.5-turbo", instructions = None, tools = Seq(CodeInterpreterTool), - fileIds = Seq("file-abc123", "file-abc456"), + toolResources = Some(ToolResources(Some(CodeInterpreterToolResource(Some(Seq("file-abc123", "file-abc456")))))), metadata = Map("user_id" -> "user_abc123"), usage = Some(Usage(promptTokens = 123, completionTokens = 456, totalTokens = 579)) ) @@ -414,7 +413,7 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { ) ) ), - fileIds = Seq.empty, + toolResources = None, metadata = Map.empty, usage = None ) @@ -448,8 +447,8 @@ class ThreadRunsDataSpec extends AnyFlatSpec with Matchers with EitherValues { lastError = None, model = "gpt-4", instructions = Some("You summarize books."), - tools = Seq(RetrievalTool), - fileIds = Seq.empty, + tools = Seq(FileSearchTool), + toolResources = None, metadata = Map.empty, usage = None ) diff --git a/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala b/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala new file mode 100644 index 00000000..0815e8b3 --- /dev/null +++ b/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala @@ -0,0 +1,70 @@ +package sttp.openai.requests.vectorstore + +import org.scalatest.EitherValues +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.openai.fixtures.VectorStoreFixture +import sttp.openai.json.{SnakePickle, SttpUpickleApiExtension} +import sttp.openai.requests.vectorstore.VectorStoreRequestBody.CreateVectorStoreBody +import sttp.openai.requests.vectorstore.VectorStoreResponseData.{FileCounts, InProgress, VectorStore} + +class VectorStoreDataSpec extends AnyFlatSpec with Matchers with EitherValues { + + "Given create vector store request" should "be properly serialized to Json" in { + // given + val givenRequest = CreateVectorStoreBody( + fileIds = Some(Seq("file_1", "file_2")), + name = Some("vs_1") + ) + + val jsonRequest: ujson.Value = ujson.read(VectorStoreFixture.jsonCreateRequest) + + // when + 
val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) + + // then + serializedJson shouldBe jsonRequest + } + + "Given create vector store request with expires" should "be properly serialized to Json" in { + // given + val givenRequest = CreateVectorStoreBody( + fileIds = Some(Seq("file_1", "file_2")), + name = Some("vs_1"), + expiresAfter = Some(ExpiresAfter("11111", 2)) + ) + + val jsonRequest: ujson.Value = ujson.read(VectorStoreFixture.jsonCreateWithExpiresRequest) + + // when + val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) + + // then + serializedJson shouldBe jsonRequest + } + + "Given create vector store mode" should "be properly deserialized from Json" in { + import sttp.openai.requests.vectorstore.VectorStoreResponseData.VectorStore._ + // given + val givenResponse = VectorStore( + id = "vs_1", + `object` = "vector_store", + createdAt = 1698107661, + name = "test_vs", + usageBytes = 123456, + fileCounts = FileCounts(0, 1, 1, 2, 4), + status = InProgress, + expiresAfter = None, + expiresAt = Some(1698107651), + lastActiveAt = Some(1698107661), + lastUsedAt = Some(1698107681) + ) + val jsonResponse = VectorStoreFixture.jsonObject + + // when + val serializedJson: Either[Exception, VectorStore] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + serializedJson.value shouldBe givenResponse + } +} \ No newline at end of file diff --git a/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala b/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala new file mode 100644 index 00000000..b9b69589 --- /dev/null +++ b/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala @@ -0,0 +1,68 @@ +package sttp.openai.requests.vectorstore.file + +import org.scalatest.EitherValues +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.openai.fixtures.VectorStoreFileFixture +import sttp.openai.json.{SnakePickle, SttpUpickleApiExtension} +import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{LastError, ServerError, VectorStoreFile} +import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.CreateVectorStoreFileBody + +class VectorStoreFileDataSpec extends AnyFlatSpec with Matchers with EitherValues { + + "Given create vector store file request" should "be properly serialized to Json" in { + // given + val givenRequest = CreateVectorStoreFileBody( + fileId = "file_1" + ) + + val jsonRequest: ujson.Value = ujson.read(VectorStoreFileFixture.jsonCreateRequest) + + // when + val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) + + // then + serializedJson shouldBe jsonRequest + } + + "Vector store file response" should "be properly deserialized from Json" in { + import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.VectorStoreFile._ + // given + val givenResponse = VectorStoreFile( + id = "vsf_1", + `object` = "vector_store.file", + createdAt = 1698107661, + usageBytes = 123456, + status = Completed, + vectorStoreId = "vs_1", + ) + val jsonResponse = VectorStoreFileFixture.jsonObject + + // when + val serializedJson: Either[Exception, VectorStoreFile] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + serializedJson.value shouldBe givenResponse + } + + "Vector store file response with error" should "be properly deserialized from Json" in { + import 
sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.VectorStoreFile._ + // given + val givenResponse = VectorStoreFile( + id = "vsf_1", + `object` = "vector_store.file", + createdAt = 1698107661, + usageBytes = 123456, + status = Completed, + vectorStoreId = "vs_1", + lastError = Some(LastError(ServerError, "Failed")) + ) + val jsonResponse = VectorStoreFileFixture.jsonObjectWithLastError + + // when + val serializedJson: Either[Exception, VectorStoreFile] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + serializedJson.value shouldBe givenResponse + } +} \ No newline at end of file From 57c04ab668b94e1d640a856304d0cc02b7e213a3 Mon Sep 17 00:00:00 2001 From: "bartlomiej.zylinski" Date: Thu, 16 May 2024 11:38:53 +0200 Subject: [PATCH 2/4] Fix typo --- .../openai/requests/threads/runs/ThreadRunsResponseData.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala b/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala index a3a2bf9b..da829990 100644 --- a/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/threads/runs/ThreadRunsResponseData.scala @@ -385,7 +385,7 @@ object ThreadRunsResponseData { ) extends ToolCall object FileSearchToolCall { - implicit val file_searchToolCallR: SnakePickle.Reader[FileSearchToolCall] = SnakePickle.macroR[FileSearchToolCall] + implicit val fileSearchToolCallR: SnakePickle.Reader[FileSearchToolCall] = SnakePickle.macroR[FileSearchToolCall] } /** Function tool call From a3ee92e3b0d3699f61a4344a0ad2dfbf41b360b8 Mon Sep 17 00:00:00 2001 From: "bartlomiej.zylinski" Date: Fri, 17 May 2024 11:26:03 +0200 Subject: [PATCH 3/4] Added scala docs --- README.md | 26 +++-- core/src/main/scala/sttp/openai/OpenAI.scala | 94 ++++++++++++++++++- .../scala/sttp/openai/OpenAISyncClient.scala | 73 ++++++++++++++ .../assistants/AssistantsResponseData.scala | 5 +- .../requests/vectorstore/ExpiresAfter.scala | 9 +- .../vectorstore/VectorStoreRequestBody.scala | 31 ++++-- .../vectorstore/VectorStoreResponseData.scala | 21 ++++- .../vectorstore/file/FileStatus.scala | 8 +- .../file/VectorStoreFileRequestBody.scala | 22 +++++ .../file/VectorStoreFileResponseData.scala | 50 ++++++++-- .../fixtures/VectorStoreFileFixture.scala | 46 +++++++++ .../openai/fixtures/VectorStoreFixture.scala | 56 +++++++++++ .../vectorstore/VectorStoreDataSpec.scala | 83 +++++++++++++++- .../file/VectorStoreFileDataSpec.scala | 85 ++++++++++++++++- 14 files changed, 566 insertions(+), 43 deletions(-) diff --git a/README.md b/README.md index 792529aa..1fb666d2 100644 --- a/README.md +++ b/README.md @@ -39,9 +39,10 @@ import sttp.openai.requests.completions.chat.ChatRequestBody.{ChatBody, ChatComp import sttp.openai.requests.completions.chat.message._ object Main extends App { + // Read your API secret-key from env variables + private val apiKey = System.getenv("openai-key") + // Create an instance of OpenAISyncClient providing your API secret-key - private val apiKey = System.getProperty("openai-key") - val openAI: OpenAISyncClient = OpenAISyncClient(apiKey) // Create body of Chat Completions Request @@ -98,7 +99,6 @@ object Main extends App { content = Content.TextContent("Hello!"), ) ) - val chatRequestBody: ChatBody = ChatBody( // assuming one has already executed `ollama pull mistral` in console @@ -146,7 +146,11 @@ import 
sttp.openai.requests.completions.chat.message._ object Main extends IOApp { override def run(args: List[String]): IO[ExitCode] = { - val openAI: OpenAI = new OpenAI("your-secret-key", uri"https://api.groq.com/openai/v1") + // Read your API secret-key from env variables + private val apiKey = System.getenv("openai-key") + + // Create an instance of OpenAISyncClient providing your API secret-key + val openAI: OpenAI = new OpenAI(apiKey, uri"https://api.groq.com/openai/v1") val bodyMessages: Seq[Message] = Seq( Message.UserMessage( @@ -158,6 +162,7 @@ object Main extends IOApp { model = ChatCompletionModel.CustomChatCompletionModel("gemma-7b-it"), messages = bodyMessages ) + HttpClientCatsBackend.resource[IO]().use { backend => val response: IO[Either[OpenAIException, ChatResponse]] = openAI @@ -214,7 +219,11 @@ import sttp.openai.requests.completions.chat.message._ object Main extends IOApp { override def run(args: List[String]): IO[ExitCode] = { - val openAI: OpenAI = new OpenAI("your-secret-key") + // Read your API secret-key from env variables + private val apiKey = System.getenv("openai-key") + + // Create an instance of OpenAISyncClient providing your API secret-key + val openAI: OpenAI = new OpenAI(apiKey) val bodyMessages: Seq[Message] = Seq( Message.UserMessage( @@ -226,6 +235,7 @@ object Main extends IOApp { model = ChatCompletionModel.GPT35Turbo, messages = bodyMessages ) + HttpClientCatsBackend.resource[IO]().use { backend => val response: IO[Either[OpenAIException, ChatResponse]] = openAI @@ -284,7 +294,11 @@ import sttp.openai.requests.completions.chat.message._ object Main extends IOApp { override def run(args: List[String]): IO[ExitCode] = { - val openAI: OpenAI = new OpenAI("your-secret-key") + // Read your API secret-key from env variables + private val apiKey = System.getenv("openai-key") + + // Create an instance of OpenAISyncClient providing your API secret-key + val openAI: OpenAI = new OpenAI(apiKey) val bodyMessages: Seq[Message] = Seq( Message.UserMessage( diff --git a/core/src/main/scala/sttp/openai/OpenAI.scala b/core/src/main/scala/sttp/openai/OpenAI.scala index 07fd52b3..6f097182 100644 --- a/core/src/main/scala/sttp/openai/OpenAI.scala +++ b/core/src/main/scala/sttp/openai/OpenAI.scala @@ -16,7 +16,12 @@ import sttp.openai.requests.embeddings.EmbeddingsRequestBody.EmbeddingsBody import sttp.openai.requests.embeddings.EmbeddingsResponseBody.EmbeddingResponse import sttp.openai.requests.files.FilesResponseData._ import sttp.openai.requests.finetunes.FineTunesRequestBody -import sttp.openai.requests.finetunes.FineTunesResponseData.{DeleteFineTuneModelResponse, FineTuneEventsResponse, FineTuneResponse, GetFineTunesResponse} +import sttp.openai.requests.finetunes.FineTunesResponseData.{ + DeleteFineTuneModelResponse, + FineTuneEventsResponse, + FineTuneResponse, + GetFineTunesResponse +} import sttp.openai.requests.images.ImageResponseData.ImageResponse import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody import sttp.openai.requests.images.edit.ImageEditsConfig @@ -33,13 +38,23 @@ import sttp.openai.requests.threads.ThreadsRequestBody.CreateThreadBody import sttp.openai.requests.threads.ThreadsResponseData.{DeleteThreadResponse, ThreadData} import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage import sttp.openai.requests.threads.messages.ThreadMessagesResponseData.{ListMessagesResponse, MessageData} -import sttp.openai.requests.threads.runs.ThreadRunsRequestBody.{CreateRun, CreateThreadAndRun, ModifyRun, 
SubmitToolOutputsToRun, ToolOutput} +import sttp.openai.requests.threads.runs.ThreadRunsRequestBody.{ + CreateRun, + CreateThreadAndRun, + ModifyRun, + SubmitToolOutputsToRun, + ToolOutput +} import sttp.openai.requests.threads.runs.ThreadRunsResponseData.{ListRunStepsResponse, ListRunsResponse, RunData, RunStepData} import sttp.openai.requests.threads.QueryParameters import sttp.openai.requests.vectorstore.VectorStoreRequestBody.{CreateVectorStoreBody, ModifyVectorStoreBody} import sttp.openai.requests.vectorstore.VectorStoreResponseData.{DeleteVectorStoreResponse, ListVectorStoresResponse, VectorStore} import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.{CreateVectorStoreFileBody, ListVectorStoreFilesBody} -import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{DeleteVectorStoreFileResponse, ListVectorStoreFilesResponse, VectorStoreFile} +import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{ + DeleteVectorStoreFileResponse, + ListVectorStoreFilesResponse, + VectorStoreFile +} import java.io.File import java.nio.file.Paths @@ -944,12 +959,26 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .post(openAIUris.threadRunCancel(threadId, runId)) .response(asJsonSnake[RunData]) + /** Creates vector store + * + * @param createVectorStoreBody + * Options for new vector store + * @return + * Newly created vector store or exception + */ def createVectorStore(createVectorStoreBody: CreateVectorStoreBody): Request[Either[OpenAIException, VectorStore]] = betaOpenAIAuthRequest .post(openAIUris.VectorStores) .body(createVectorStoreBody) .response(asJsonSnake[VectorStore]) + /** Lists vector store + * + * @param queryParameters + * Search params + * @return + * List of vector stores matching criteria or exception + */ def listVectorStores( queryParameters: QueryParameters = QueryParameters.empty ): Request[Either[OpenAIException, ListVectorStoresResponse]] = @@ -957,11 +986,27 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .get(openAIUris.VectorStores.withParams(queryParameters.toMap)) .response(asJsonSnake[ListVectorStoresResponse]) + /** Retrieves vector store by id + * + * @param vectorStoreId + * Id of vector store + * @return + * Vector store object or exception + */ def retrieveVectorStore(vectorStoreId: String): Request[Either[OpenAIException, VectorStore]] = betaOpenAIAuthRequest .get(openAIUris.vectorStore(vectorStoreId)) .response(asJsonSnake[VectorStore]) + /** Modifies vector store + * + * @param vectorStoreId + * Id of vector store to modify + * @param modifyVectorStoreBody + * New values for store properties + * @return + * Modified vector store object + */ def modifyVectorStore( vectorStoreId: String, modifyVectorStoreBody: ModifyVectorStoreBody @@ -971,11 +1016,27 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .body(modifyVectorStoreBody) .response(asJsonSnake[VectorStore]) + /** Deletes vector store + * + * @param vectorStoreId + * Id of vector store to be deleted + * @return + * Result of deleted operation + */ def deleteVectorStore(vectorStoreId: String): Request[Either[OpenAIException, DeleteVectorStoreResponse]] = betaOpenAIAuthRequest .delete(openAIUris.vectorStore(vectorStoreId)) .response(asJsonSnake[DeleteVectorStoreResponse]) + /** Creates vector store file + * + * @param vectorStoreId + * Id of vector store for file + * @param createVectorStoreFileBody + * Properties of file + * @return + * Newly created vector store file + */ def 
createVectorStoreFile( vectorStoreId: String, createVectorStoreFileBody: CreateVectorStoreFileBody @@ -985,6 +1046,15 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .body(createVectorStoreFileBody) .response(asJsonSnake[VectorStoreFile]) + /** List files belonging to particular datastore + * + * @param vectorStoreId + * Id of vector store + * @param queryParameters + * Search params + * @return + * List of vector store files + */ def listVectorStoreFiles( vectorStoreId: String, queryParameters: ListVectorStoreFilesBody = ListVectorStoreFilesBody() @@ -993,11 +1063,29 @@ class OpenAI(authToken: String, baseUri: Uri = OpenAIUris.OpenAIBaseUri) { .get(openAIUris.vectorStoreFiles(vectorStoreId).withParams(queryParameters.toMap)) .response(asJsonSnake[ListVectorStoreFilesResponse]) + /** Retrieves vector store file by id + * + * @param vectorStoreId + * Id of vector store + * @param fileId + * Id of vector store file + * @return + * Vector store file + */ def retrieveVectorStoreFile(vectorStoreId: String, fileId: String): Request[Either[OpenAIException, VectorStoreFile]] = betaOpenAIAuthRequest .get(openAIUris.vectorStoreFile(vectorStoreId, fileId)) .response(asJsonSnake[VectorStoreFile]) + /** Deletes vector store file by id + * + * @param vectorStoreId + * Id of vector store + * @param fileId + * Id of vector store file + * @return + * Result of delete operation + */ def deleteVectorStoreFile(vectorStoreId: String, fileId: String): Request[Either[OpenAIException, DeleteVectorStoreFileResponse]] = betaOpenAIAuthRequest .delete(openAIUris.vectorStoreFile(vectorStoreId, fileId)) diff --git a/core/src/main/scala/sttp/openai/OpenAISyncClient.scala b/core/src/main/scala/sttp/openai/OpenAISyncClient.scala index 5821fc5c..45314b12 100644 --- a/core/src/main/scala/sttp/openai/OpenAISyncClient.scala +++ b/core/src/main/scala/sttp/openai/OpenAISyncClient.scala @@ -690,41 +690,114 @@ class OpenAISyncClient private (authToken: String, backend: SyncBackend, closeCl def cancelRun(threadId: String, runId: String): RunData = sendOrThrow(openAI.cancelRun(threadId, runId)) + /** Creates vector store + * + * @param createVectorStoreBody + * Options for new vector store + * @return + * Newly created vector store or exception + */ def createVectorStore(createVectorStoreBody: CreateVectorStoreBody): VectorStore = sendOrThrow(openAI.createVectorStore(createVectorStoreBody)) + /** Lists vector store + * + * @param queryParameters + * Search params + * @return + * List of vector stores matching criteria or exception + */ def listVectorStores( queryParameters: QueryParameters = QueryParameters.empty ): ListVectorStoresResponse = sendOrThrow(openAI.listVectorStores(queryParameters)) + /** Retrieves vector store by id + * + * @param vectorStoreId + * Id of vector store + * @return + * Vector store object or exception + */ def retrieveVectorStore(vectorStoreId: String): VectorStore = sendOrThrow(openAI.retrieveVectorStore(vectorStoreId)) + /** Modifies vector store + * + * @param vectorStoreId + * Id of vector store to modify + * @param modifyVectorStoreBody + * New values for store properties + * @return + * Modified vector store object + */ def modifyVectorStore( vectorStoreId: String, modifyVectorStoreBody: ModifyVectorStoreBody ): VectorStore = sendOrThrow(openAI.modifyVectorStore(vectorStoreId, modifyVectorStoreBody)) + /** Deletes vector store + * + * @param vectorStoreId + * Id of vector store to be deleted + * @return + * Result of deleted operation + */ def 
deleteVectorStore(vectorStoreId: String): DeleteVectorStoreResponse = sendOrThrow(openAI.deleteVectorStore(vectorStoreId)) + /** Creates vector store file + * + * @param vectorStoreId + * Id of vector store for file + * @param createVectorStoreFileBody + * Properties of file + * @return + * Newly created vector store file + */ def createVectorStoreFile( vectorStoreId: String, createVectorStoreFileBody: CreateVectorStoreFileBody ): VectorStoreFile = sendOrThrow(openAI.createVectorStoreFile(vectorStoreId, createVectorStoreFileBody)) + /** List files belonging to particular datastore + * + * @param vectorStoreId + * Id of vector store + * @param queryParameters + * Search params + * @return + * List of vector store files + */ def listVectorStoreFiles( vectorStoreId: String, queryParameters: ListVectorStoreFilesBody = ListVectorStoreFilesBody() ): ListVectorStoreFilesResponse = sendOrThrow(openAI.listVectorStoreFiles(vectorStoreId, queryParameters)) + /** Retrieves vector store file by id + * + * @param vectorStoreId + * Id of vector store + * @param fileId + * Id of vector store file + * @return + * Vector store file + */ def retrieveVectorStoreFile(vectorStoreId: String, fileId: String): VectorStoreFile = sendOrThrow(openAI.retrieveVectorStoreFile(vectorStoreId, fileId)) + /** Deletes vector store file by id + * + * @param vectorStoreId + * Id of vector store + * @param fileId + * Id of vector store file + * @return + * Result of delete operation + */ def deleteVectorStoreFile(vectorStoreId: String, fileId: String): DeleteVectorStoreFileResponse = sendOrThrow(openAI.deleteVectorStoreFile(vectorStoreId, fileId)) diff --git a/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala b/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala index dce4e485..2a415f3d 100644 --- a/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala @@ -64,9 +64,11 @@ object AssistantsResponseData { * @param data * A list of assistant objects. * @param firstId + * Id of first object * @param lastId + * Id of last object * @param hasMore - * } + * Denotes if there are more object available } */ case class ListAssistantsResponse( `object`: String = "list", @@ -80,6 +82,7 @@ object AssistantsResponseData { } /** @param id + * Id of deleted object * @param `object` * assistant.deleted * @param deleted diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala index 737a352b..3b23b263 100644 --- a/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala @@ -3,6 +3,13 @@ package sttp.openai.requests.vectorstore import sttp.openai.json.SnakePickle import ujson.{Obj, Value} +/** Represents the expiration policy for a vector store. + * + * @param anchor + * Required. Anchor timestamp after which the expiration policy applies. Supported anchors: last_active_at. + * @param days + * Required. The number of days after the anchor time that the vector store will expire. 
+ */ case class ExpiresAfter(anchor: String, days: Int) object ExpiresAfter { @@ -12,4 +19,4 @@ object ExpiresAfter { ea => Obj("anchor" -> ea.anchor, "days" -> ea.days), json => ExpiresAfter(json("anchor").str, json("days").num.toInt) ) -} \ No newline at end of file +} diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala index 32e8f6cd..834f0715 100644 --- a/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala @@ -4,9 +4,21 @@ import sttp.openai.json.SnakePickle object VectorStoreRequestBody { + /** Represents options for creating vector store. + * + * @param fileIds + * Optional. A list of File IDs that the vector store should use. Useful for tools like file_search that can access files. + * @param name + * Optional. The name of the vector store. + * @param expiresAfter + * Optional. The expiration policy for a vector store. + * @param metadata + * Optional. Set of 16 key-value pairs that can be attached to an object. Useful for storing additional information about the object in + * a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + */ case class CreateVectorStoreBody( - fileIds: Option[Seq[String]] = None, name: Option[String] = None, + fileIds: Option[Seq[String]] = None, expiresAfter: Option[ExpiresAfter] = None, metadata: Option[Map[String, String]] = None ) @@ -15,14 +27,15 @@ object VectorStoreRequestBody { implicit val createVectorStoreBodyW: SnakePickle.Writer[CreateVectorStoreBody] = SnakePickle.macroW[CreateVectorStoreBody] } - case class RetrieveVectorStoreBody( - vectorStoreId: String - ) - - object RetrieveVectorStoreBody { - implicit val retrieveVectorStoreBody: SnakePickle.Writer[RetrieveVectorStoreBody] = SnakePickle.macroW[RetrieveVectorStoreBody] - } - + /** Represents options for modifying vector store. + * @param name + * Optional. The name of the vector store. + * @param expiresAfter + * Optional. The expiration policy for a vector store. + * @param metadata + * Optional. Set of 16 key-value pairs that can be attached to an object. Useful for storing additional information about the object in + * a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + */ case class ModifyVectorStoreBody( name: Option[String] = None, expiresAfter: Option[ExpiresAfter] = None, diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala index aac1373c..254fc0bb 100644 --- a/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala @@ -76,7 +76,7 @@ object VectorStoreResponseData { case object Expired extends StoreStatus object StoreStatus { - implicit val storeStatusRW: SnakePickle.Reader[StoreStatus] = SnakePickle + implicit val storeStatusR: SnakePickle.Reader[StoreStatus] = SnakePickle .reader[Value] .map(json => json.str match { @@ -87,6 +87,17 @@ object VectorStoreResponseData { ) } + /** @param object + * Always "list" + * @param data + * A list of vector store objects. 
+ * @param firstId + * Id of first object + * @param lastId + * Id of last object + * @param hasMore + * Denotes if there are more objects available + */ case class ListVectorStoresResponse( `object`: String = "list", data: Seq[VectorStore], @@ -99,6 +110,14 @@ object VectorStoreResponseData { implicit val listVectorStoresResponseR: SnakePickle.Reader[ListVectorStoresResponse] = SnakePickle.macroR[ListVectorStoresResponse] } + /** @param id + * Id of deleted object + * @param `object` + * vector_store.deleted + * @param deleted + * boolean describing whether or not the operation was successful. + * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistant]] + */ case class DeleteVectorStoreResponse( id: String, `object`: String, diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala index 36a593e7..a4b9a14d 100644 --- a/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala @@ -14,10 +14,10 @@ object FileStatus { .readwriter[Value] .bimap[FileStatus]( { - case InProgress => Value("in_progress") - case Completed => Value("completed") - case Failed => Value("failed") - case Cancelled => Value("cancelled") + case InProgress => "in_progress" + case Completed => "completed" + case Failed => "failed" + case Cancelled => "cancelled" }, json => { json.str match { diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala index 90a52ec2..894521c6 100644 --- a/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala @@ -4,6 +4,11 @@ import sttp.openai.json.SnakePickle object VectorStoreFileRequestBody { + /** Create a vector store file by attaching a File to a vector store. + * + * @param fileId + * A File ID that the vector store should use. Useful for tools like file_search that can access files. + */ case class CreateVectorStoreFileBody( fileId: String ) @@ -12,6 +17,23 @@ object VectorStoreFileRequestBody { implicit val createVectorStoreFileBodyR: SnakePickle.Writer[CreateVectorStoreFileBody] = SnakePickle.macroW[CreateVectorStoreFileBody] } + /** Represents options for listing objects with pagination and filtering. + * + * @param limit + * Defaults to 20. A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + * @param order + * Defaults to desc. Sort order by the created_at timestamp of the objects. asc for ascending order and desc for descending order. + * @param after + * A cursor for use in pagination. after is an object ID that defines your place in the list. For instance, if you make a list request + * and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the + * list. + * @param before + * A cursor for use in pagination. before is an object ID that defines your place in the list. For instance, if you make a list request + * and receive 100 objects, ending with obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of + * the list. + * @param filter + * Optional. Filter by file status. 
Possible values are "in_progress", "completed", "failed", "cancelled". + */ case class ListVectorStoreFilesBody( limit: Int = 20, order: String = "desc", diff --git a/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala index a04d01bc..fe85455d 100644 --- a/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala +++ b/core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala @@ -5,20 +5,27 @@ import ujson.Value object VectorStoreFileResponseData { - /** Represents a vector store object. + /** Represents a vector store file. * * @param id * The identifier, which can be referenced in API endpoints. - * @param `object` - * The object type, which is always vector_store. - * @param createdAt - * The Unix timestamp (in seconds) for when the vector store was created. + * @param object + * The object type, which is always vector_store.file. * @param usageBytes - * The total number of bytes used by the files in the vector store. + * The total vector store usage in bytes. Note that this may be different from the original file size. + * @param createdAt + * The Unix timestamp (in seconds) for when the vector store file was created. + * @param vectorStoreId + * The ID of the vector store that the File is attached to. + * @param status + * The status of the vector store file. Possible values are "in_progress", "completed", "cancelled", or "failed". The status + * "completed" indicates that the vector store file is ready for use. + * @param lastError + * The last error associated with this vector store file, or null if there are no errors. */ case class VectorStoreFile( id: String, - `object`: String = "vector_store.file", + `object`: String, usageBytes: Int, createdAt: Int, vectorStoreId: String, @@ -30,6 +37,13 @@ object VectorStoreFileResponseData { implicit val vectorStoreFileR: SnakePickle.Reader[VectorStoreFile] = SnakePickle.macroR[VectorStoreFile] } + /** Represents the last error associated with a vector store file. + * + * @param code + * The error code. Possible values are "server_error" or "rate_limit_exceeded". + * @param message + * A human-readable description of the error. + */ case class LastError(code: ErrorCode, message: String) object LastError { @@ -37,9 +51,7 @@ object VectorStoreFileResponseData { } sealed trait ErrorCode - case object ServerError extends ErrorCode - case object RateLimitExceeded extends ErrorCode object ErrorCode { @@ -53,6 +65,17 @@ object VectorStoreFileResponseData { ) } + /** @param object + * Always "list" + * @param data + * A list of vector store file objects. 
+ * @param firstId + * Id of first object + * @param lastId + * Id of last object + * @param hasMore + * Denotes if there are more objects available + */ case class ListVectorStoreFilesResponse( `object`: String = "list", data: Seq[VectorStoreFile], @@ -65,7 +88,14 @@ object VectorStoreFileResponseData { implicit val listVectorStoreFilesResponseR: SnakePickle.Reader[ListVectorStoreFilesResponse] = SnakePickle.macroR[ListVectorStoreFilesResponse] } - + /** @param id + * Id of deleted object + * @param `object` + * vector_store.file.deleted + * @param deleted + * boolean describing whether or not the operation was successful. + * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistant]] + */ case class DeleteVectorStoreFileResponse( id: String, `object`: String, diff --git a/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala index ba1ca0fd..83ee9ef6 100644 --- a/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala +++ b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala @@ -7,6 +7,15 @@ object VectorStoreFileFixture { | "file_id": "file_1" |}""".stripMargin + val jsonListRequest: String = + """{ + | "limit": 30, + | "order": "asc", + | "after": "111", + | "before": "222", + | "filter": "in_progress" + |}""".stripMargin + val jsonObject: String = """{ | "id": "vsf_1", @@ -31,4 +40,41 @@ object VectorStoreFileFixture { | "message": "Failed" | } |}""".stripMargin + + val jsonList: String = + """{ + | "object": "list", + | "data": [ + | { + | "id": "vsf_1", + | "object": "vector_store.file", + | "usage_bytes" : 123456, + | "status": "in_progress", + | "created_at": 1698107661, + | "vector_store_id": "vs_1" + | }, + | { + | "id": "vsf_2", + | "object": "vector_store.file", + | "usage_bytes" : 1234567, + | "status": "completed", + | "created_at": 1698107661, + | "vector_store_id": "vs_1", + | "last_error": { + | "code": "rate_limit_exceeded", + | "message": "Failed2" + | } + | } + | ], + | "first_id": "vsf_1", + | "last_id": "vsf_2", + | "has_more": true + |}""".stripMargin + + val jsonDelete: String = + """{ + | "id": "file_abc123", + | "object": "vector_store.file.deleted", + | "deleted": true + |}""".stripMargin } diff --git a/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala index 9c5a3113..e0e9c7d6 100644 --- a/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala +++ b/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala @@ -18,6 +18,15 @@ object VectorStoreFixture { | } |}""".stripMargin + val jsonModify: String = + """{ + | "name": "vs_3", + | "expires_after": { + | "anchor": "2322", + | "days": 5 + | } + |}""".stripMargin + val jsonObject: String = """{ | "id": "vs_1", @@ -38,4 +47,51 @@ object VectorStoreFixture { | "metadata": {}, | "last_used_at": 1698107681 |}""".stripMargin + + val jsonList: String = + """{ + |"object": "list", + | "data": [ + | { + | "id": "vs_abc123", + | "object": "vector_store", + | "created_at": 1699061776, + | "name": "Support FAQ", + | "usage_bytes": 139920, + | "status": "completed", + | "file_counts": { + | "in_progress": 0, + | "completed": 3, + | "failed": 0, + | "cancelled": 0, + | "total": 3 + | } + | }, + | { + | "id": "vs_abc456", + | "object": "vector_store", + | "created_at": 1699061776, + | "name": "Support FAQ v2", + | "usage_bytes": 139921, + | "status": 
"in_progress", + | "file_counts": { + | "in_progress": 1, + | "completed": 2, + | "failed": 2, + | "cancelled": 1, + | "total": 6 + | } + | } + | ], + | "first_id": "vs_abc123", + | "last_id": "vs_abc456", + | "has_more": false + |}""".stripMargin + + val jsonDelete: String = + """{ + | "id": "vs_abc123", + | "object": "vector_store.deleted", + | "deleted": true + |}""".stripMargin } diff --git a/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala b/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala index 0815e8b3..c499dedc 100644 --- a/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala +++ b/core/src/test/scala/sttp/openai/requests/vectorstore/VectorStoreDataSpec.scala @@ -5,8 +5,8 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import sttp.openai.fixtures.VectorStoreFixture import sttp.openai.json.{SnakePickle, SttpUpickleApiExtension} -import sttp.openai.requests.vectorstore.VectorStoreRequestBody.CreateVectorStoreBody -import sttp.openai.requests.vectorstore.VectorStoreResponseData.{FileCounts, InProgress, VectorStore} +import sttp.openai.requests.vectorstore.VectorStoreRequestBody.{CreateVectorStoreBody, ModifyVectorStoreBody} +import sttp.openai.requests.vectorstore.VectorStoreResponseData.{Completed, DeleteVectorStoreResponse, FileCounts, InProgress, ListVectorStoresResponse, VectorStore} class VectorStoreDataSpec extends AnyFlatSpec with Matchers with EitherValues { @@ -43,7 +43,23 @@ class VectorStoreDataSpec extends AnyFlatSpec with Matchers with EitherValues { serializedJson shouldBe jsonRequest } - "Given create vector store mode" should "be properly deserialized from Json" in { + "Given modify vector store request" should "be properly serialized to Json" in { + // given + val givenRequest = ModifyVectorStoreBody( + name = Some("vs_3"), + expiresAfter = Some(ExpiresAfter("2322", 5)) + ) + + val jsonRequest: ujson.Value = ujson.read(VectorStoreFixture.jsonModify) + + // when + val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) + + // then + serializedJson shouldBe jsonRequest + } + + "Vector store object" should "be properly deserialized from Json" in { import sttp.openai.requests.vectorstore.VectorStoreResponseData.VectorStore._ // given val givenResponse = VectorStore( @@ -67,4 +83,63 @@ class VectorStoreDataSpec extends AnyFlatSpec with Matchers with EitherValues { // then serializedJson.value shouldBe givenResponse } -} \ No newline at end of file + + "List of vector stores" should "be properly deserialized from Json" in { + import sttp.openai.requests.vectorstore.VectorStoreResponseData.ListVectorStoresResponse._ + // given + + val first = VectorStore( + id = "vs_abc123", + `object` = "vector_store", + createdAt = 1699061776, + name = "Support FAQ", + usageBytes = 139920, + status = Completed, + fileCounts = FileCounts(0, 3, 0, 0, 3) + ) + + val second = VectorStore( + id = "vs_abc456", + `object` = "vector_store", + createdAt = 1699061776, + name = "Support FAQ v2", + usageBytes = 139921, + status = InProgress, + fileCounts = FileCounts(1, 2, 2, 1, 6) + ) + val givenResponse = ListVectorStoresResponse( + `object` = "list", + data = Seq(first, second), + firstId = "vs_abc123", + lastId = "vs_abc456", + hasMore = false + ) + + val jsonResponse = VectorStoreFixture.jsonList + + // when + val serializedJson: Either[Exception, ListVectorStoresResponse] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + 
serializedJson.value shouldBe givenResponse + } + + "Delete of vector stores response" should "be properly deserialized from Json" in { + import sttp.openai.requests.vectorstore.VectorStoreResponseData.DeleteVectorStoreResponse._ + // given + + val givenResponse = DeleteVectorStoreResponse( + id = "vs_abc123", + `object` = "vector_store.deleted", + deleted = true + ) + + val jsonResponse = VectorStoreFixture.jsonDelete + + // when + val serializedJson: Either[Exception, DeleteVectorStoreResponse] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + serializedJson.value shouldBe givenResponse + } +} diff --git a/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala b/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala index b9b69589..c580c2c0 100644 --- a/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala +++ b/core/src/test/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileDataSpec.scala @@ -5,8 +5,8 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import sttp.openai.fixtures.VectorStoreFileFixture import sttp.openai.json.{SnakePickle, SttpUpickleApiExtension} -import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{LastError, ServerError, VectorStoreFile} -import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.CreateVectorStoreFileBody +import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.{CreateVectorStoreFileBody, ListVectorStoreFilesBody} +import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.{DeleteVectorStoreFileResponse, LastError, ListVectorStoreFilesResponse, RateLimitExceeded, ServerError, VectorStoreFile} class VectorStoreFileDataSpec extends AnyFlatSpec with Matchers with EitherValues { @@ -25,6 +25,25 @@ class VectorStoreFileDataSpec extends AnyFlatSpec with Matchers with EitherValue serializedJson shouldBe jsonRequest } + "Vector store file search params" should "be properly serialized to Json" in { + // given + val givenRequest = ListVectorStoreFilesBody( + limit = 30, + order = "asc", + after = Some("111"), + before = Some("222"), + filter = Some(InProgress) + ) + + val jsonRequest: ujson.Value = ujson.read(VectorStoreFileFixture.jsonListRequest) + + // when + val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) + + // then + serializedJson shouldBe jsonRequest + } + "Vector store file response" should "be properly deserialized from Json" in { import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.VectorStoreFile._ // given @@ -34,7 +53,7 @@ class VectorStoreFileDataSpec extends AnyFlatSpec with Matchers with EitherValue createdAt = 1698107661, usageBytes = 123456, status = Completed, - vectorStoreId = "vs_1", + vectorStoreId = "vs_1" ) val jsonResponse = VectorStoreFileFixture.jsonObject @@ -65,4 +84,62 @@ class VectorStoreFileDataSpec extends AnyFlatSpec with Matchers with EitherValue // then serializedJson.value shouldBe givenResponse } -} \ No newline at end of file + + "Vector store file list response" should "be properly deserialized from Json" in { + import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.ListVectorStoreFilesResponse._ + // given + val one = VectorStoreFile( + id = "vsf_1", + `object` = "vector_store.file", + createdAt = 1698107661, + usageBytes = 123456, + status = InProgress, + vectorStoreId = "vs_1", + lastError = None + ) + + val two = 
VectorStoreFile( + id = "vsf_2", + `object` = "vector_store.file", + createdAt = 1698107661, + usageBytes = 1234567, + status = Completed, + vectorStoreId = "vs_1", + lastError = Some(LastError(RateLimitExceeded, "Failed2")) + ) + + val givenResponse = ListVectorStoreFilesResponse( + `object` = "list", + data = Seq(one, two), + firstId = "vsf_1", + lastId = "vsf_2", + hasMore = true + ) + val jsonResponse = VectorStoreFileFixture.jsonList + + // when + val serializedJson: Either[Exception, ListVectorStoreFilesResponse] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + serializedJson.value shouldBe givenResponse + } + + "Delete of vector store file response" should "be properly deserialized from Json" in { + import sttp.openai.requests.vectorstore.file.VectorStoreFileResponseData.DeleteVectorStoreFileResponse._ + // given + + val givenResponse = DeleteVectorStoreFileResponse( + id = "file_abc123", + `object` = "vector_store.file.deleted", + deleted = true + ) + + val jsonResponse = VectorStoreFileFixture.jsonDelete + + // when + val serializedJson: Either[Exception, DeleteVectorStoreFileResponse] = SttpUpickleApiExtension.deserializeJsonSnake.apply(jsonResponse) + + // then + serializedJson.value shouldBe givenResponse + } +} From 07d37974f9aa4c483ddc2631279546c415a238ba Mon Sep 17 00:00:00 2001 From: "bartlomiej.zylinski" Date: Fri, 17 May 2024 11:32:08 +0200 Subject: [PATCH 4/4] Fix docs --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 1fb666d2..a2fa00e6 100644 --- a/README.md +++ b/README.md @@ -147,7 +147,7 @@ import sttp.openai.requests.completions.chat.message._ object Main extends IOApp { override def run(args: List[String]): IO[ExitCode] = { // Read your API secret-key from env variables - private val apiKey = System.getenv("openai-key") + val apiKey = System.getenv("openai-key") // Create an instance of OpenAISyncClient providing your API secret-key val openAI: OpenAI = new OpenAI(apiKey, uri"https://api.groq.com/openai/v1") @@ -220,7 +220,7 @@ import sttp.openai.requests.completions.chat.message._ object Main extends IOApp { override def run(args: List[String]): IO[ExitCode] = { // Read your API secret-key from env variables - private val apiKey = System.getenv("openai-key") + val apiKey = System.getenv("openai-key") // Create an instance of OpenAISyncClient providing your API secret-key val openAI: OpenAI = new OpenAI(apiKey) @@ -295,7 +295,7 @@ import sttp.openai.requests.completions.chat.message._ object Main extends IOApp { override def run(args: List[String]): IO[ExitCode] = { // Read your API secret-key from env variables - private val apiKey = System.getenv("openai-key") + val apiKey = System.getenv("openai-key") // Create an instance of OpenAISyncClient providing your API secret-key val openAI: OpenAI = new OpenAI(apiKey)
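Taken together, the endpoints documented in this patch series can be exercised end to end through the synchronous client. The sketch below is illustrative only and is not part of the patches: the VectorStoreExample object, the openai-key environment variable and the file_abc123 file id are assumed placeholders, while the client and the request/response types are the ones added above.

import sttp.openai.OpenAISyncClient
import sttp.openai.requests.vectorstore.VectorStoreRequestBody.CreateVectorStoreBody
import sttp.openai.requests.vectorstore.file.VectorStoreFileRequestBody.CreateVectorStoreFileBody

object VectorStoreExample extends App {
  // Placeholder key lookup, mirroring the README examples
  private val apiKey = System.getenv("openai-key")
  val openAI: OpenAISyncClient = OpenAISyncClient(apiKey)

  // Create an empty vector store; files can be attached to it afterwards
  val store = openAI.createVectorStore(CreateVectorStoreBody(name = Some("Support FAQ")))

  // Attach a previously uploaded file (placeholder id) to the new store
  val storeFile = openAI.createVectorStoreFile(store.id, CreateVectorStoreFileBody(fileId = "file_abc123"))

  // List the files currently attached to the store, using the default pagination
  val files = openAI.listVectorStoreFiles(store.id)
  files.data.foreach(f => println(s"${f.id}: ${f.status}"))
}

As the sendOrThrow wrappers above suggest, each OpenAISyncClient call sends the request eagerly and throws an OpenAIException on failure, so no Either handling is needed in this sketch.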