diff --git a/.gitignore b/.gitignore index 0d4aa54e..4ece9319 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ Cargo.lock data *.mp3 + +*.py \ No newline at end of file diff --git a/async-openai/Cargo.toml b/async-openai/Cargo.toml index 354dae89..a05644c0 100644 --- a/async-openai/Cargo.toml +++ b/async-openai/Cargo.toml @@ -50,6 +50,8 @@ derive_builder = "0.20.2" secrecy = { version = "0.10.3", features = ["serde"] } bytes = "1.9.0" eventsource-stream = "0.2.3" +serde_urlencoded = "0.7.1" +url = "2.5" tokio-tungstenite = { version = "0.26.1", optional = true, default-features = false } hmac = { version = "0.12", optional = true, default-features = false} sha2 = { version = "0.10", optional = true, default-features = false } diff --git a/async-openai/README.md b/async-openai/README.md index 823df5d3..3a266ba8 100644 --- a/async-openai/README.md +++ b/async-openai/README.md @@ -14,7 +14,7 @@
-Logo created by this repo itself
+Logo created by this repo itself
## Overview @@ -32,15 +32,16 @@ | **Realtime** | Realtime Calls, Client secrets, Client events, Server events | | **Chat Completions** | Chat Completions, Streaming | | **Assistants** (Beta) | Assistants, Threads, Messages, Runs, Run steps, Streaming | -| **Administration** | Administration, Admin API Keys, Invites, Users, Projects, Project users, Project service accounts, Project API keys, Project rate limits, Audit logs, Usage, Certificates | +| **Administration** | Admin API Keys, Invites, Users, Projects, Project users, Project service accounts, Project API keys, Project rate limits, Audit logs, Usage, Certificates | | **Legacy** | Completions | Features that makes `async-openai` unique: - Bring your own custom types for Request or Response objects. -- SSE streaming on available APIs +- SSE streaming on available APIs. +- Customize query and headers per request, customize headers globally. - Requests (except SSE streaming) including form submissions are retried with exponential backoff when [rate limited](https://platform.openai.com/docs/guides/rate-limits). - Ergonomic builder pattern for all request objects. -- Microsoft Azure OpenAI Service (only for APIs matching OpenAI spec) +- Microsoft Azure OpenAI Service (only for APIs matching OpenAI spec). ## Usage diff --git a/async-openai/src/admin_api_keys.rs b/async-openai/src/admin_api_keys.rs index 20b58a53..ce90b4a3 100644 --- a/async-openai/src/admin_api_keys.rs +++ b/async-openai/src/admin_api_keys.rs @@ -1,12 +1,10 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::api_keys::{ AdminApiKey, AdminApiKeyDeleteResponse, ApiKeyList, CreateAdminApiKeyRequest, }, - Client, + Client, RequestOptions, }; /// Admin API keys enable Organization Owners to programmatically manage various aspects of their @@ -14,21 +12,22 @@ use crate::{ /// allowing you to automate organization management tasks. pub struct AdminAPIKeys<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> AdminAPIKeys<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// List all organization and project API keys. 
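// Note the signature change here: `list()` no longer takes a `query` argument. Pagination and
// filter parameters are instead set per request on the API group, e.g. (a sketch only, assuming
// the `RequestOptionsBuilder::query` helper demonstrated by the Files test later in this diff is
// available here too, and an `admin_api_keys()` accessor; the parameter names are illustrative):
//
//     let keys = client
//         .admin_api_keys()
//         .query(&[("limit", "20")])?
//         .list()
//         .await?;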
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query("/organization/admin_api_keys", &query) + .get("/organization/admin_api_keys", &self.request_options) .await } @@ -38,7 +37,11 @@ impl<'c, C: Config> AdminAPIKeys<'c, C> { request: CreateAdminApiKeyRequest, ) -> Result { self.client - .post("/organization/admin_api_keys", request) + .post( + "/organization/admin_api_keys", + request, + &self.request_options, + ) .await } @@ -46,7 +49,10 @@ impl<'c, C: Config> AdminAPIKeys<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, key_id: &str) -> Result { self.client - .get(format!("/organization/admin_api_keys/{key_id}").as_str()) + .get( + format!("/organization/admin_api_keys/{key_id}").as_str(), + &self.request_options, + ) .await } @@ -54,7 +60,10 @@ impl<'c, C: Config> AdminAPIKeys<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, key_id: &str) -> Result { self.client - .delete(format!("/organization/admin_api_keys/{key_id}").as_str()) + .delete( + format!("/organization/admin_api_keys/{key_id}").as_str(), + &self.request_options, + ) .await } } diff --git a/async-openai/src/assistants.rs b/async-openai/src/assistants.rs index abca6d75..44672381 100644 --- a/async-openai/src/assistants.rs +++ b/async-openai/src/assistants.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,7 +5,7 @@ use crate::{ AssistantObject, CreateAssistantRequest, DeleteAssistantResponse, ListAssistantsResponse, ModifyAssistantRequest, }, - Client, + Client, RequestOptions, }; /// Build assistants that can call models and use tools to perform tasks. @@ -15,11 +13,15 @@ use crate::{ /// [Get started with the Assistants API](https://platform.openai.com/docs/assistants) pub struct Assistants<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Assistants<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Create an assistant with a model and instructions. @@ -28,14 +30,19 @@ impl<'c, C: Config> Assistants<'c, C> { &self, request: CreateAssistantRequest, ) -> Result { - self.client.post("/assistants", request).await + self.client + .post("/assistants", request, &self.request_options) + .await } /// Retrieves an assistant. 
#[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, assistant_id: &str) -> Result { self.client - .get(&format!("/assistants/{assistant_id}")) + .get( + &format!("/assistants/{assistant_id}"), + &self.request_options, + ) .await } @@ -47,7 +54,11 @@ impl<'c, C: Config> Assistants<'c, C> { request: ModifyAssistantRequest, ) -> Result { self.client - .post(&format!("/assistants/{assistant_id}"), request) + .post( + &format!("/assistants/{assistant_id}"), + request, + &self.request_options, + ) .await } @@ -55,16 +66,16 @@ impl<'c, C: Config> Assistants<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, assistant_id: &str) -> Result { self.client - .delete(&format!("/assistants/{assistant_id}")) + .delete( + &format!("/assistants/{assistant_id}"), + &self.request_options, + ) .await } /// Returns a list of assistants. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/assistants", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client.get("/assistants", &self.request_options).await } } diff --git a/async-openai/src/audio.rs b/async-openai/src/audio.rs index 8993eb25..027f42b4 100644 --- a/async-openai/src/audio.rs +++ b/async-openai/src/audio.rs @@ -1,14 +1,18 @@ -use crate::{config::Config, Client, Speech, Transcriptions, Translations}; +use crate::{config::Config, Client, RequestOptions, Speech, Transcriptions, Translations}; /// Turn audio into text or text into audio. /// Related guide: [Speech to text](https://platform.openai.com/docs/guides/speech-to-text) pub struct Audio<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Audio<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// APIs in Speech group. diff --git a/async-openai/src/audit_logs.rs b/async-openai/src/audit_logs.rs index 7f09f9df..95ac7e10 100644 --- a/async-openai/src/audit_logs.rs +++ b/async-openai/src/audit_logs.rs @@ -1,7 +1,6 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::audit_logs::ListAuditLogsResponse, Client, + RequestOptions, }; /// Logs of user actions and configuration changes within this organization. @@ -9,21 +8,22 @@ use crate::{ /// Once activated, for security reasons, logging cannot be deactivated. pub struct AuditLogs<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> AuditLogs<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// List user actions and configuration changes within this organization. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn get(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn get(&self) -> Result { self.client - .get_with_query("/organization/audit_logs", &query) + .get("/organization/audit_logs", &self.request_options) .await } } diff --git a/async-openai/src/batches.rs b/async-openai/src/batches.rs index efa5b0da..968ed6e6 100644 --- a/async-openai/src/batches.rs +++ b/async-openai/src/batches.rs @@ -1,10 +1,8 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::batches::{Batch, BatchRequest, ListBatchesResponse}, - Client, + Client, RequestOptions, }; /// Create large batches of API requests for asynchronous processing. The Batch API returns completions within 24 hours for a 50% discount. @@ -12,32 +10,37 @@ use crate::{ /// Related guide: [Batch](https://platform.openai.com/docs/guides/batch) pub struct Batches<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Batches<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates and executes a batch from an uploaded file of requests #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: BatchRequest) -> Result { - self.client.post("/batches", request).await + self.client + .post("/batches", request, &self.request_options) + .await } /// List your organization's batches. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/batches", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client.get("/batches", &self.request_options).await } /// Retrieves a batch. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, batch_id: &str) -> Result { - self.client.get(&format!("/batches/{batch_id}")).await + self.client + .get(&format!("/batches/{batch_id}"), &self.request_options) + .await } /// Cancels an in-progress batch. The batch will be in status `cancelling` for up to 10 minutes, before changing to `cancelled`, where it will have partial results (if any) available in the output file. @@ -47,6 +50,7 @@ impl<'c, C: Config> Batches<'c, C> { .post( &format!("/batches/{batch_id}/cancel"), serde_json::json!({}), + &self.request_options, ) .await } diff --git a/async-openai/src/certificates.rs b/async-openai/src/certificates.rs index 44048fc5..be6b2b2d 100644 --- a/async-openai/src/certificates.rs +++ b/async-openai/src/certificates.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,33 +5,31 @@ use crate::{ Certificate, DeleteCertificateResponse, ListCertificatesResponse, ModifyCertificateRequest, ToggleCertificatesRequest, UploadCertificateRequest, }, - Client, + Client, RequestOptions, }; /// Certificates enable Mutual TLS (mTLS) authentication for your organization. /// Manage certificates at the organization level. 
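// The dedicated `retrieve_with_query` helper is removed further down in this file's diff; under
// the new design the equivalent call presumably sets the query on the group before `retrieve`.
// A sketch only, assuming the same `RequestOptionsBuilder::query` helper applies here; the
// `include[]` parameter and certificate id are purely illustrative:
//
//     let cert = client
//         .certificates()
//         .query(&[("include[]", "content")])?
//         .retrieve("cert_abc123")
//         .await?;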
pub struct Certificates<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Certificates<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } // Organization-level certificate operations /// List all certificates for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_organization( - &self, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list_organization(&self) -> Result { self.client - .get_with_query("/organization/certificates", &query) + .get("/organization/certificates", &self.request_options) .await } @@ -44,7 +40,7 @@ impl<'c, C: Config> Certificates<'c, C> { request: UploadCertificateRequest, ) -> Result { self.client - .post("/organization/certificates", request) + .post("/organization/certificates", request, &self.request_options) .await } @@ -55,7 +51,11 @@ impl<'c, C: Config> Certificates<'c, C> { request: ToggleCertificatesRequest, ) -> Result { self.client - .post("/organization/certificates/activate", request) + .post( + "/organization/certificates/activate", + request, + &self.request_options, + ) .await } @@ -66,7 +66,11 @@ impl<'c, C: Config> Certificates<'c, C> { request: ToggleCertificatesRequest, ) -> Result { self.client - .post("/organization/certificates/deactivate", request) + .post( + "/organization/certificates/deactivate", + request, + &self.request_options, + ) .await } @@ -74,23 +78,9 @@ impl<'c, C: Config> Certificates<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, certificate_id: &str) -> Result { self.client - .get(format!("/organization/certificates/{certificate_id}").as_str()) - .await - } - - /// Retrieve a single certificate with optional include parameters. - pub async fn retrieve_with_query( - &self, - certificate_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { - self.client - .get_with_query( + .get( format!("/organization/certificates/{certificate_id}").as_str(), - query, + &self.request_options, ) .await } @@ -106,6 +96,7 @@ impl<'c, C: Config> Certificates<'c, C> { .post( format!("/organization/certificates/{certificate_id}").as_str(), request, + &self.request_options, ) .await } @@ -118,7 +109,10 @@ impl<'c, C: Config> Certificates<'c, C> { certificate_id: &str, ) -> Result { self.client - .delete(format!("/organization/certificates/{certificate_id}").as_str()) + .delete( + format!("/organization/certificates/{certificate_id}").as_str(), + &self.request_options, + ) .await } } diff --git a/async-openai/src/chat.rs b/async-openai/src/chat.rs index e826e4f5..74b15914 100644 --- a/async-openai/src/chat.rs +++ b/async-openai/src/chat.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -8,7 +6,7 @@ use crate::{ ChatCompletionResponseStream, CreateChatCompletionRequest, CreateChatCompletionResponse, UpdateChatCompletionRequest, }, - Client, + Client, RequestOptions, }; /// Given a list of messages comprising a conversation, the model will return a response. 
@@ -16,11 +14,15 @@ use crate::{ /// Related guide: [Chat Completions](https://platform.openai.com/docs/guides/text-generation) pub struct Chat<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Chat<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates a model response for the given chat conversation. @@ -48,7 +50,9 @@ impl<'c, C: Config> Chat<'c, C> { )); } } - self.client.post("/chat/completions", request).await + self.client + .post("/chat/completions", request, &self.request_options) + .await } /// Creates a completion for the chat message. @@ -81,18 +85,18 @@ impl<'c, C: Config> Chat<'c, C> { request.stream = Some(true); } - Ok(self.client.post_stream("/chat/completions", request).await) + Ok(self + .client + .post_stream("/chat/completions", request, &self.request_options) + .await) } /// List stored Chat Completions. Only Chat Completions that have been stored /// with the `store` parameter set to `true` will be returned. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query("/chat/completions", &query) + .get("/chat/completions", &self.request_options) .await } @@ -104,7 +108,10 @@ impl<'c, C: Config> Chat<'c, C> { completion_id: &str, ) -> Result { self.client - .get(&format!("/chat/completions/{completion_id}")) + .get( + &format!("/chat/completions/{completion_id}"), + &self.request_options, + ) .await } @@ -122,7 +129,11 @@ impl<'c, C: Config> Chat<'c, C> { request: UpdateChatCompletionRequest, ) -> Result { self.client - .post(&format!("/chat/completions/{completion_id}"), request) + .post( + &format!("/chat/completions/{completion_id}"), + request, + &self.request_options, + ) .await } @@ -131,24 +142,23 @@ impl<'c, C: Config> Chat<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, completion_id: &str) -> Result { self.client - .delete(&format!("/chat/completions/{completion_id}")) + .delete( + &format!("/chat/completions/{completion_id}"), + &self.request_options, + ) .await } /// Get a list of messages for the specified chat completion. - #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn messages( + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn messages( &self, completion_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + ) -> Result { self.client - .get_with_query( + .get( &format!("/chat/completions/{completion_id}/messages"), - &query, + &self.request_options, ) .await } diff --git a/async-openai/src/chatkit.rs b/async-openai/src/chatkit.rs index 3eb795fb..57a362fd 100644 --- a/async-openai/src/chatkit.rs +++ b/async-openai/src/chatkit.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,7 +5,7 @@ use crate::{ ChatSessionResource, CreateChatSessionBody, DeletedThreadResource, ThreadItemListResource, ThreadListResource, ThreadResource, }, - Client, + Client, RequestOptions, }; /// ChatKit API for managing sessions and threads. 
@@ -15,11 +13,15 @@ use crate::{ /// Related guide: [ChatKit](https://platform.openai.com/docs/api-reference/chatkit) pub struct Chatkit<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Chatkit<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Access sessions API. @@ -36,11 +38,15 @@ impl<'c, C: Config> Chatkit<'c, C> { /// ChatKit sessions API. pub struct ChatkitSessions<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ChatkitSessions<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Create a ChatKit session. @@ -49,7 +55,9 @@ impl<'c, C: Config> ChatkitSessions<'c, C> { &self, request: CreateChatSessionBody, ) -> Result { - self.client.post("/chatkit/sessions", request).await + self.client + .post("/chatkit/sessions", request, &self.request_options) + .await } /// Cancel a ChatKit session. @@ -59,6 +67,7 @@ impl<'c, C: Config> ChatkitSessions<'c, C> { .post( &format!("/chatkit/sessions/{session_id}/cancel"), serde_json::json!({}), + &self.request_options, ) .await } @@ -67,27 +76,33 @@ impl<'c, C: Config> ChatkitSessions<'c, C> { /// ChatKit threads API. pub struct ChatkitThreads<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ChatkitThreads<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// List ChatKit threads. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/chatkit/threads", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client + .get("/chatkit/threads", &self.request_options) + .await } /// Retrieve a ChatKit thread. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, thread_id: &str) -> Result { self.client - .get(&format!("/chatkit/threads/{thread_id}")) + .get( + &format!("/chatkit/threads/{thread_id}"), + &self.request_options, + ) .await } @@ -95,22 +110,21 @@ impl<'c, C: Config> ChatkitThreads<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, thread_id: &str) -> Result { self.client - .delete(&format!("/chatkit/threads/{thread_id}")) + .delete( + &format!("/chatkit/threads/{thread_id}"), + &self.request_options, + ) .await } /// List ChatKit thread items. 
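// The client.rs changes that follow thread a `RequestOptions` value into every reqwest request
// builder via `request_options.headers()` and `request_options.query()`. The type's definition is
// not part of this section of the diff; the shape below is inferred only from that usage (and the
// new serde_urlencoded dependency suggests query parameters are kept as serialized key/value
// pairs) — it is a sketch, not the crate's actual definition.

use reqwest::header::HeaderMap;

pub struct RequestOptions {
    headers: Option<HeaderMap>,   // merged on top of the config-level headers
    query: Vec<(String, String)>, // appended after the config-level query parameters
}

impl RequestOptions {
    pub fn new() -> Self {
        Self { headers: None, query: Vec::new() }
    }

    pub fn headers(&self) -> Option<&HeaderMap> {
        self.headers.as_ref()
    }

    pub fn query(&self) -> &[(String, String)] {
        &self.query
    }
}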
- #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_items( - &self, - thread_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn list_items(&self, thread_id: &str) -> Result { self.client - .get_with_query(&format!("/chatkit/threads/{thread_id}/items"), &query) + .get( + &format!("/chatkit/threads/{thread_id}/items"), + &self.request_options, + ) .await } } diff --git a/async-openai/src/client.rs b/async-openai/src/client.rs index ec8363dd..7f0e0a4e 100644 --- a/async-openai/src/client.rs +++ b/async-openai/src/client.rs @@ -16,7 +16,7 @@ use crate::{ moderation::Moderations, traits::AsyncTryFrom, Assistants, Audio, Batches, Chat, Completions, Containers, Conversations, Embeddings, Evals, - FineTuning, Models, Responses, Threads, Uploads, Usage, VectorStores, Videos, + FineTuning, Models, RequestOptions, Responses, Threads, Uploads, Usage, VectorStores, Videos, }; #[cfg(feature = "realtime")] @@ -198,88 +198,87 @@ impl Client { } /// Make a GET request to {path} and deserialize the response body - pub(crate) async fn get(&self, path: &str) -> Result + pub(crate) async fn get( + &self, + path: &str, + request_options: &RequestOptions, + ) -> Result where O: DeserializeOwned, { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .get(self.config.url(path)) .query(&self.config.query()) - .headers(self.config.headers()) - .build()?) - }; + .headers(self.config.headers()); - self.execute(request_maker).await - } + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } - /// Make a GET request to {path} with given Query and deserialize the response body - pub(crate) async fn get_with_query(&self, path: &str, query: &Q) -> Result - where - O: DeserializeOwned, - Q: Serialize + ?Sized, - { - let request_maker = || async { - Ok(self - .http_client - .get(self.config.url(path)) - .query(&self.config.query()) - .query(query) - .headers(self.config.headers()) - .build()?) + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) }; self.execute(request_maker).await } /// Make a DELETE request to {path} and deserialize the response body - pub(crate) async fn delete(&self, path: &str) -> Result + pub(crate) async fn delete( + &self, + path: &str, + request_options: &RequestOptions, + ) -> Result where O: DeserializeOwned, { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .delete(self.config.url(path)) .query(&self.config.query()) - .headers(self.config.headers()) - .build()?) - }; + .headers(self.config.headers()); - self.execute(request_maker).await - } + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } - /// Make a GET request to {path} and return the response body - pub(crate) async fn get_raw(&self, path: &str) -> Result<(Bytes, HeaderMap), OpenAIError> { - let request_maker = || async { - Ok(self - .http_client - .get(self.config.url(path)) - .query(&self.config.query()) - .headers(self.config.headers()) - .build()?) + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) 
}; - self.execute_raw(request_maker).await + self.execute(request_maker).await } - pub(crate) async fn get_raw_with_query( + /// Make a GET request to {path} and return the response body + pub(crate) async fn get_raw( &self, path: &str, - query: &Q, - ) -> Result<(Bytes, HeaderMap), OpenAIError> - where - Q: Serialize + ?Sized, - { + request_options: &RequestOptions, + ) -> Result<(Bytes, HeaderMap), OpenAIError> { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .get(self.config.url(path)) .query(&self.config.query()) - .query(query) - .headers(self.config.headers()) - .build()?) + .headers(self.config.headers()); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) }; self.execute_raw(request_maker).await @@ -290,37 +289,61 @@ impl Client { &self, path: &str, request: I, + request_options: &RequestOptions, ) -> Result<(Bytes, HeaderMap), OpenAIError> where I: Serialize, { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .headers(self.config.headers()) - .json(&request) - .build()?) + .json(&request); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) }; self.execute_raw(request_maker).await } /// Make a POST request to {path} and deserialize the response body - pub(crate) async fn post(&self, path: &str, request: I) -> Result + pub(crate) async fn post( + &self, + path: &str, + request: I, + request_options: &RequestOptions, + ) -> Result where I: Serialize, O: DeserializeOwned, { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .headers(self.config.headers()) - .json(&request) - .build()?) + .json(&request); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) }; self.execute(request_maker).await @@ -331,39 +354,63 @@ impl Client { &self, path: &str, form: F, + request_options: &RequestOptions, ) -> Result<(Bytes, HeaderMap), OpenAIError> where Form: AsyncTryFrom, F: Clone, { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .headers(self.config.headers()) - .multipart(
>::try_from(form.clone()).await?) - .build()?) + .multipart(>::try_from(form.clone()).await?); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) }; self.execute_raw(request_maker).await } /// POST a form at {path} and deserialize the response body - pub(crate) async fn post_form(&self, path: &str, form: F) -> Result + pub(crate) async fn post_form( + &self, + path: &str, + form: F, + request_options: &RequestOptions, + ) -> Result where O: DeserializeOwned, Form: AsyncTryFrom, F: Clone, { let request_maker = || async { - Ok(self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .headers(self.config.headers()) - .multipart(>::try_from(form.clone()).await?) - .build()?) + .multipart(>::try_from(form.clone()).await?); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + Ok(request_builder.build()?) }; self.execute(request_maker).await @@ -373,6 +420,7 @@ impl Client { &self, path: &str, form: F, + request_options: &RequestOptions, ) -> Result> + Send>>, OpenAIError> where F: Clone, @@ -381,15 +429,22 @@ impl Client { { // Build and execute request manually since multipart::Form is not Clone // and .eventsource() requires cloneability - let response = self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .multipart(>::try_from(form.clone()).await?) 
- .headers(self.config.headers()) - .send() - .await - .map_err(OpenAIError::Reqwest)?; + .headers(self.config.headers()); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + let response = request_builder.send().await.map_err(OpenAIError::Reqwest)?; // Check for error status if !response.status().is_success() { @@ -519,19 +574,28 @@ impl Client { &self, path: &str, request: I, + request_options: &RequestOptions, ) -> Pin> + Send>> where I: Serialize, O: DeserializeOwned + std::marker::Send + 'static, { - let event_source = self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .headers(self.config.headers()) - .json(&request) - .eventsource() - .unwrap(); + .json(&request); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + let event_source = request_builder.eventsource().unwrap(); stream(event_source).await } @@ -540,20 +604,29 @@ impl Client { &self, path: &str, request: I, + request_options: &RequestOptions, event_mapper: impl Fn(eventsource_stream::Event) -> Result + Send + 'static, ) -> Pin> + Send>> where I: Serialize, O: DeserializeOwned + std::marker::Send + 'static, { - let event_source = self + let mut request_builder = self .http_client .post(self.config.url(path)) .query(&self.config.query()) .headers(self.config.headers()) - .json(&request) - .eventsource() - .unwrap(); + .json(&request); + + if let Some(headers) = request_options.headers() { + request_builder = request_builder.headers(headers.clone()); + } + + if !request_options.query().is_empty() { + request_builder = request_builder.query(request_options.query()); + } + + let event_source = request_builder.eventsource().unwrap(); stream_mapped_raw_events(event_source, event_mapper).await } diff --git a/async-openai/src/completion.rs b/async-openai/src/completion.rs index 432201c3..2d21b9cc 100644 --- a/async-openai/src/completion.rs +++ b/async-openai/src/completion.rs @@ -3,6 +3,7 @@ use crate::{ config::Config, error::OpenAIError, types::{CompletionResponseStream, CreateCompletionRequest, CreateCompletionResponse}, + RequestOptions, }; /// Given a prompt, the model will return one or more predicted completions, @@ -13,11 +14,15 @@ use crate::{ /// Related guide: [Legacy Completions](https://platform.openai.com/docs/guides/gpt/completions-api) pub struct Completions<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Completions<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates a completion for the provided prompt and parameters @@ -39,7 +44,9 @@ impl<'c, C: Config> Completions<'c, C> { )); } } - self.client.post("/completions", request).await + self.client + .post("/completions", request, &self.request_options) + .await } /// Creates a completion request for the provided prompt and parameters @@ -72,6 +79,9 @@ impl<'c, C: Config> Completions<'c, C> { request.stream = Some(true); } - Ok(self.client.post_stream("/completions", request).await) + Ok(self + .client + .post_stream("/completions", request, &self.request_options) + .await) } } diff --git 
a/async-openai/src/container_files.rs b/async-openai/src/container_files.rs index fd0f03d3..541a691e 100644 --- a/async-openai/src/container_files.rs +++ b/async-openai/src/container_files.rs @@ -1,5 +1,4 @@ use bytes::Bytes; -use serde::Serialize; use crate::{ config::Config, @@ -8,13 +7,14 @@ use crate::{ ContainerFileListResource, ContainerFileResource, CreateContainerFileRequest, DeleteContainerFileResponse, }, - Client, + Client, RequestOptions, }; /// Create and manage container files for use with the Code Interpreter tool. pub struct ContainerFiles<'c, C: Config> { client: &'c Client, container_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ContainerFiles<'c, C> { @@ -22,6 +22,7 @@ impl<'c, C: Config> ContainerFiles<'c, C> { Self { client, container_id: container_id.to_string(), + request_options: RequestOptions::new(), } } @@ -36,18 +37,22 @@ impl<'c, C: Config> ContainerFiles<'c, C> { request: CreateContainerFileRequest, ) -> Result { self.client - .post_form(&format!("/containers/{}/files", self.container_id), request) + .post_form( + &format!("/containers/{}/files", self.container_id), + request, + &self.request_options, + ) .await } /// List container files. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query(&format!("/containers/{}/files", self.container_id), &query) + .get( + &format!("/containers/{}/files", self.container_id), + &self.request_options, + ) .await } @@ -55,7 +60,10 @@ impl<'c, C: Config> ContainerFiles<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, file_id: &str) -> Result { self.client - .get(format!("/containers/{}/files/{file_id}", self.container_id).as_str()) + .get( + format!("/containers/{}/files/{file_id}", self.container_id).as_str(), + &self.request_options, + ) .await } @@ -63,7 +71,10 @@ impl<'c, C: Config> ContainerFiles<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, file_id: &str) -> Result { self.client - .delete(format!("/containers/{}/files/{file_id}", self.container_id).as_str()) + .delete( + format!("/containers/{}/files/{file_id}", self.container_id).as_str(), + &self.request_options, + ) .await } @@ -71,7 +82,10 @@ impl<'c, C: Config> ContainerFiles<'c, C> { pub async fn content(&self, file_id: &str) -> Result { let (bytes, _headers) = self .client - .get_raw(format!("/containers/{}/files/{file_id}/content", self.container_id).as_str()) + .get_raw( + format!("/containers/{}/files/{file_id}/content", self.container_id).as_str(), + &self.request_options, + ) .await?; Ok(bytes) } diff --git a/async-openai/src/containers.rs b/async-openai/src/containers.rs index 7254bccf..c5b58bff 100644 --- a/async-openai/src/containers.rs +++ b/async-openai/src/containers.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, container_files::ContainerFiles, @@ -7,16 +5,20 @@ use crate::{ types::containers::{ ContainerListResource, ContainerResource, CreateContainerRequest, DeleteContainerResponse, }, - Client, + Client, RequestOptions, }; pub struct Containers<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Containers<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + 
request_options: RequestOptions::new(), + } } /// [ContainerFiles] API group @@ -30,23 +32,25 @@ impl<'c, C: Config> Containers<'c, C> { &self, request: CreateContainerRequest, ) -> Result { - self.client.post("/containers", request).await + self.client + .post("/containers", request, &self.request_options) + .await } /// List containers. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/containers", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client.get("/containers", &self.request_options).await } /// Retrieve a container. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, container_id: &str) -> Result { self.client - .get(format!("/containers/{container_id}").as_str()) + .get( + format!("/containers/{container_id}").as_str(), + &self.request_options, + ) .await } @@ -54,7 +58,10 @@ impl<'c, C: Config> Containers<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, container_id: &str) -> Result { self.client - .delete(format!("/containers/{container_id}").as_str()) + .delete( + format!("/containers/{container_id}").as_str(), + &self.request_options, + ) .await } } diff --git a/async-openai/src/conversation_items.rs b/async-openai/src/conversation_items.rs index f58cd162..e018e034 100644 --- a/async-openai/src/conversation_items.rs +++ b/async-openai/src/conversation_items.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,13 +5,14 @@ use crate::{ ConversationItem, ConversationItemList, ConversationResource, CreateConversationItemsRequest, }, - Client, + Client, RequestOptions, }; /// Conversation items represent items within a conversation. pub struct ConversationItems<'c, C: Config> { client: &'c Client, pub conversation_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ConversationItems<'c, C> { @@ -21,6 +20,7 @@ impl<'c, C: Config> ConversationItems<'c, C> { Self { client, conversation_id: conversation_id.into(), + request_options: RequestOptions::new(), } } @@ -34,20 +34,18 @@ impl<'c, C: Config> ConversationItems<'c, C> { .post( &format!("/conversations/{}/items", &self.conversation_id), request, + &self.request_options, ) .await } /// List all items for a conversation. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( &format!("/conversations/{}/items", &self.conversation_id), - &query, + &self.request_options, ) .await } @@ -56,10 +54,10 @@ impl<'c, C: Config> ConversationItems<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, item_id: &str) -> Result { self.client - .get(&format!( - "/conversations/{}/items/{item_id}", - &self.conversation_id - )) + .get( + &format!("/conversations/{}/items/{item_id}", &self.conversation_id), + &self.request_options, + ) .await } @@ -67,10 +65,10 @@ impl<'c, C: Config> ConversationItems<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, item_id: &str) -> Result { self.client - .delete(&format!( - "/conversations/{}/items/{item_id}", - &self.conversation_id - )) + .delete( + &format!("/conversations/{}/items/{item_id}", &self.conversation_id), + &self.request_options, + ) .await } } diff --git a/async-openai/src/conversations.rs b/async-openai/src/conversations.rs index 41c0cee8..87598ef6 100644 --- a/async-openai/src/conversations.rs +++ b/async-openai/src/conversations.rs @@ -6,16 +6,20 @@ use crate::{ ConversationResource, CreateConversationRequest, DeleteConversationResponse, UpdateConversationRequest, }, - Client, + Client, RequestOptions, }; pub struct Conversations<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Conversations<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// [ConversationItems] API group @@ -29,7 +33,9 @@ impl<'c, C: Config> Conversations<'c, C> { &self, request: CreateConversationRequest, ) -> Result { - self.client.post("/conversations", request).await + self.client + .post("/conversations", request, &self.request_options) + .await } /// Retrieves a conversation. 
@@ -39,7 +45,10 @@ impl<'c, C: Config> Conversations<'c, C> { conversation_id: &str, ) -> Result { self.client - .get(&format!("/conversations/{conversation_id}")) + .get( + &format!("/conversations/{conversation_id}"), + &self.request_options, + ) .await } @@ -50,7 +59,10 @@ impl<'c, C: Config> Conversations<'c, C> { conversation_id: &str, ) -> Result { self.client - .delete(&format!("/conversations/{conversation_id}")) + .delete( + &format!("/conversations/{conversation_id}"), + &self.request_options, + ) .await } @@ -62,7 +74,11 @@ impl<'c, C: Config> Conversations<'c, C> { request: UpdateConversationRequest, ) -> Result { self.client - .post(&format!("/conversations/{conversation_id}"), request) + .post( + &format!("/conversations/{conversation_id}"), + request, + &self.request_options, + ) .await } } diff --git a/async-openai/src/embedding.rs b/async-openai/src/embedding.rs index 7b7f4395..a1975a77 100644 --- a/async-openai/src/embedding.rs +++ b/async-openai/src/embedding.rs @@ -4,7 +4,7 @@ use crate::{ types::embeddings::{ CreateBase64EmbeddingResponse, CreateEmbeddingRequest, CreateEmbeddingResponse, }, - Client, + Client, RequestOptions, }; #[cfg(not(feature = "byot"))] @@ -16,11 +16,15 @@ use crate::types::embeddings::EncodingFormat; /// Related guide: [Embeddings](https://platform.openai.com/docs/guides/embeddings) pub struct Embeddings<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Embeddings<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates an embedding vector representing the input text. @@ -39,7 +43,9 @@ impl<'c, C: Config> Embeddings<'c, C> { )); } } - self.client.post("/embeddings", request).await + self.client + .post("/embeddings", request, &self.request_options) + .await } /// Creates an embedding vector representing the input text. @@ -60,7 +66,9 @@ impl<'c, C: Config> Embeddings<'c, C> { )); } } - self.client.post("/embeddings", request).await + self.client + .post("/embeddings", request, &self.request_options) + .await } } diff --git a/async-openai/src/eval_run_output_items.rs b/async-openai/src/eval_run_output_items.rs index 7e89b8e2..e9dc14c0 100644 --- a/async-openai/src/eval_run_output_items.rs +++ b/async-openai/src/eval_run_output_items.rs @@ -1,16 +1,15 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::evals::{EvalRunOutputItem, EvalRunOutputItemList}, - Client, + Client, RequestOptions, }; pub struct EvalRunOutputItems<'c, C: Config> { client: &'c Client, pub eval_id: String, pub run_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> EvalRunOutputItems<'c, C> { @@ -19,19 +18,17 @@ impl<'c, C: Config> EvalRunOutputItems<'c, C> { client, eval_id: eval_id.into(), run_id: run_id.into(), + request_options: RequestOptions::new(), } } /// Get a list of output items for an evaluation run. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( &format!("/evals/{}/runs/{}/output_items", self.eval_id, self.run_id), - &query, + &self.request_options, ) .await } @@ -40,10 +37,13 @@ impl<'c, C: Config> EvalRunOutputItems<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, output_item_id: &str) -> Result { self.client - .get(&format!( - "/evals/{}/runs/{}/output_items/{}", - self.eval_id, self.run_id, output_item_id - )) + .get( + &format!( + "/evals/{}/runs/{}/output_items/{}", + self.eval_id, self.run_id, output_item_id + ), + &self.request_options, + ) .await } } diff --git a/async-openai/src/eval_runs.rs b/async-openai/src/eval_runs.rs index f19e6dc7..590e4ec9 100644 --- a/async-openai/src/eval_runs.rs +++ b/async-openai/src/eval_runs.rs @@ -1,16 +1,15 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, eval_run_output_items::EvalRunOutputItems, types::evals::{CreateEvalRunRequest, DeleteEvalRunResponse, EvalRun, EvalRunList}, - Client, + Client, RequestOptions, }; pub struct EvalRuns<'c, C: Config> { client: &'c Client, pub eval_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> EvalRuns<'c, C> { @@ -18,6 +17,7 @@ impl<'c, C: Config> EvalRuns<'c, C> { Self { client, eval_id: eval_id.into(), + request_options: RequestOptions::new(), } } @@ -27,13 +27,13 @@ impl<'c, C: Config> EvalRuns<'c, C> { } /// Get a list of runs for an evaluation. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query(&format!("/evals/{}/runs", self.eval_id), &query) + .get( + &format!("/evals/{}/runs", self.eval_id), + &self.request_options, + ) .await } @@ -41,7 +41,11 @@ impl<'c, C: Config> EvalRuns<'c, C> { #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: CreateEvalRunRequest) -> Result { self.client - .post(&format!("/evals/{}/runs", self.eval_id), request) + .post( + &format!("/evals/{}/runs", self.eval_id), + request, + &self.request_options, + ) .await } @@ -49,7 +53,10 @@ impl<'c, C: Config> EvalRuns<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, run_id: &str) -> Result { self.client - .get(&format!("/evals/{}/runs/{}", self.eval_id, run_id)) + .get( + &format!("/evals/{}/runs/{}", self.eval_id, run_id), + &self.request_options, + ) .await } @@ -60,6 +67,7 @@ impl<'c, C: Config> EvalRuns<'c, C> { .post( &format!("/evals/{}/runs/{}", self.eval_id, run_id), serde_json::json!({}), + &self.request_options, ) .await } @@ -68,7 +76,10 @@ impl<'c, C: Config> EvalRuns<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, run_id: &str) -> Result { self.client - .delete(&format!("/evals/{}/runs/{}", self.eval_id, run_id)) + .delete( + &format!("/evals/{}/runs/{}", self.eval_id, run_id), + &self.request_options, + ) .await } } diff --git a/async-openai/src/evals.rs b/async-openai/src/evals.rs index 1cc5aeac..00e8ac5c 100644 --- a/async-openai/src/evals.rs +++ 
b/async-openai/src/evals.rs @@ -1,22 +1,24 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, eval_runs::EvalRuns, types::evals::{CreateEvalRequest, DeleteEvalResponse, Eval, EvalList, UpdateEvalRequest}, - Client, + Client, RequestOptions, }; /// Create, manage, and run evals in the OpenAI platform. Related guide: /// [Evals](https://platform.openai.com/docs/guides/evals) pub struct Evals<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Evals<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// [EvalRuns] API group @@ -25,12 +27,9 @@ impl<'c, C: Config> Evals<'c, C> { } /// List evaluations for a project. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/evals", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client.get("/evals", &self.request_options).await } /// Create the structure of an evaluation that can be used to test a model's performance. @@ -40,13 +39,17 @@ impl<'c, C: Config> Evals<'c, C> { /// datasources. For more information, see the [Evals guide](https://platform.openai.com/docs/guides/evals). #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: CreateEvalRequest) -> Result { - self.client.post("/evals", request).await + self.client + .post("/evals", request, &self.request_options) + .await } /// Get an evaluation by ID. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, eval_id: &str) -> Result { - self.client.get(&format!("/evals/{eval_id}")).await + self.client + .get(&format!("/evals/{eval_id}"), &self.request_options) + .await } /// Update certain properties of an evaluation. @@ -57,13 +60,15 @@ impl<'c, C: Config> Evals<'c, C> { request: UpdateEvalRequest, ) -> Result { self.client - .post(&format!("/evals/{eval_id}"), request) + .post(&format!("/evals/{eval_id}"), request, &self.request_options) .await } /// Delete an evaluation. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, eval_id: &str) -> Result { - self.client.delete(&format!("/evals/{eval_id}")).await + self.client + .delete(&format!("/evals/{eval_id}"), &self.request_options) + .await } } diff --git a/async-openai/src/file.rs b/async-openai/src/file.rs index bce20415..f959b7cc 100644 --- a/async-openai/src/file.rs +++ b/async-openai/src/file.rs @@ -1,21 +1,24 @@ use bytes::Bytes; -use serde::Serialize; use crate::{ config::Config, error::OpenAIError, types::files::{CreateFileRequest, DeleteFileResponse, ListFilesResponse, OpenAIFile}, - Client, + Client, RequestOptions, }; /// Files are used to upload documents that can be used with features like Assistants and Fine-tuning. pub struct Files<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Files<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Upload a file that can be used across various endpoints. Individual files can be up to 512 MB, and the size of all files uploaded by one organization can be up to 1 TB. 
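// The caller-facing migration for list endpoints shows up in the test change a little further
// down: the query tuple slice that used to be passed as `list(&query)` now goes onto the group
// itself via `RequestOptionsBuilder::query` (imported from the crate's `traits` module in that
// test). The equivalent of the old `client.files().list(&[("purpose", "fine-tune")])` becomes:

use async_openai::{error::OpenAIError, traits::RequestOptionsBuilder, Client};

async fn list_fine_tune_files() -> Result<(), OpenAIError> {
    let client = Client::new();
    let files = client
        .files()
        .query(&[("purpose", "fine-tune")])? // per-request query, as in the updated test below
        .list()
        .await?;
    println!("{} files", files.data.len());
    Ok(())
}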
@@ -33,29 +36,30 @@ impl<'c, C: Config> Files<'c, C> { where_clause = "reqwest::multipart::Form: crate::traits::AsyncTryFrom", )] pub async fn create(&self, request: CreateFileRequest) -> Result { - self.client.post_form("/files", request).await + self.client + .post_form("/files", request, &self.request_options) + .await } /// Returns a list of files that belong to the user's organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/files", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client.get("/files", &self.request_options).await } /// Returns information about a specific file. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, file_id: &str) -> Result { - self.client.get(format!("/files/{file_id}").as_str()).await + self.client + .get(format!("/files/{file_id}").as_str(), &self.request_options) + .await } /// Delete a file. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, file_id: &str) -> Result { self.client - .delete(format!("/files/{file_id}").as_str()) + .delete(format!("/files/{file_id}").as_str(), &self.request_options) .await } @@ -63,7 +67,10 @@ impl<'c, C: Config> Files<'c, C> { pub async fn content(&self, file_id: &str) -> Result { let (bytes, _headers) = self .client - .get_raw(format!("/files/{file_id}/content").as_str()) + .get_raw( + format!("/files/{file_id}/content").as_str(), + &self.request_options, + ) .await?; Ok(bytes) } @@ -72,6 +79,7 @@ impl<'c, C: Config> Files<'c, C> { #[cfg(test)] mod tests { use crate::{ + traits::RequestOptionsBuilder, types::files::{ CreateFileRequestArgs, FileExpirationAfter, FileExpirationAfterAnchor, FilePurpose, }, @@ -109,7 +117,7 @@ mod tests { //assert_eq!(openai_file.status, Some("processed".to_owned())); // uploaded or processed let query = [("purpose", "fine-tune")]; - let list_files = client.files().list(&query).await.unwrap(); + let list_files = client.files().query(&query).unwrap().list().await.unwrap(); assert_eq!(list_files.data.into_iter().last().unwrap(), openai_file); diff --git a/async-openai/src/fine_tuning.rs b/async-openai/src/fine_tuning.rs index 4a4d271c..0ad89672 100644 --- a/async-openai/src/fine_tuning.rs +++ b/async-openai/src/fine_tuning.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -9,7 +7,7 @@ use crate::{ ListFineTuningCheckpointPermissionResponse, ListFineTuningJobCheckpointsResponse, ListFineTuningJobEventsResponse, ListPaginatedFineTuningJobsResponse, }, - Client, + Client, RequestOptions, }; /// Manage fine-tuning jobs to tailor a model to your specific training data. @@ -17,11 +15,15 @@ use crate::{ /// Related guide: [Fine-tune models](https://platform.openai.com/docs/guides/fine-tuning) pub struct FineTuning<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> FineTuning<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates a fine-tuning job which begins the process of creating a new model from a given dataset. 
@@ -35,20 +37,16 @@ impl<'c, C: Config> FineTuning<'c, C> { &self, request: CreateFineTuningJobRequest, ) -> Result { - self.client.post("/fine_tuning/jobs", request).await + self.client + .post("/fine_tuning/jobs", request, &self.request_options) + .await } /// List your organization's fine-tuning jobs - #[crate::byot(T0 = serde::Serialize, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_paginated( - &self, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list_paginated(&self) -> Result { self.client - .get_with_query("/fine_tuning/jobs", &query) + .get("/fine_tuning/jobs", &self.request_options) .await } @@ -58,7 +56,10 @@ impl<'c, C: Config> FineTuning<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, fine_tuning_job_id: &str) -> Result { self.client - .get(format!("/fine_tuning/jobs/{fine_tuning_job_id}").as_str()) + .get( + format!("/fine_tuning/jobs/{fine_tuning_job_id}").as_str(), + &self.request_options, + ) .await } @@ -69,6 +70,7 @@ impl<'c, C: Config> FineTuning<'c, C> { .post( format!("/fine_tuning/jobs/{fine_tuning_job_id}/cancel").as_str(), (), + &self.request_options, ) .await } @@ -80,6 +82,7 @@ impl<'c, C: Config> FineTuning<'c, C> { .post( format!("/fine_tuning/jobs/{fine_tuning_job_id}/pause").as_str(), (), + &self.request_options, ) .await } @@ -91,42 +94,35 @@ impl<'c, C: Config> FineTuning<'c, C> { .post( format!("/fine_tuning/jobs/{fine_tuning_job_id}/resume").as_str(), (), + &self.request_options, ) .await } /// Get status updates for a fine-tuning job. - #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_events( + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn list_events( &self, fine_tuning_job_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + ) -> Result { self.client - .get_with_query( + .get( format!("/fine_tuning/jobs/{fine_tuning_job_id}/events").as_str(), - &query, + &self.request_options, ) .await } /// List checkpoints for a fine-tuning job. 
- #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_checkpoints( + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn list_checkpoints( &self, fine_tuning_job_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + ) -> Result { self.client - .get_with_query( + .get( format!("/fine_tuning/jobs/{fine_tuning_job_id}/checkpoints").as_str(), - &query, + &self.request_options, ) .await } @@ -142,24 +138,21 @@ impl<'c, C: Config> FineTuning<'c, C> { format!("/fine_tuning/checkpoints/{fine_tuned_model_checkpoint}/permissions") .as_str(), request, + &self.request_options, ) .await } - #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_checkpoint_permissions( + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn list_checkpoint_permissions( &self, fine_tuned_model_checkpoint: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + ) -> Result { self.client - .get_with_query( + .get( format!("/fine_tuning/checkpoints/{fine_tuned_model_checkpoint}/permissions") .as_str(), - &query, + &self.request_options, ) .await } @@ -174,6 +167,7 @@ impl<'c, C: Config> FineTuning<'c, C> { .delete( format!("/fine_tuning/checkpoints/{fine_tuned_model_checkpoint}/permissions/{permission_id}") .as_str(), + &self.request_options, ) .await } diff --git a/async-openai/src/image.rs b/async-openai/src/image.rs index 8516727a..f7586cfd 100644 --- a/async-openai/src/image.rs +++ b/async-openai/src/image.rs @@ -5,7 +5,7 @@ use crate::{ CreateImageEditRequest, CreateImageRequest, CreateImageVariationRequest, ImageEditStream, ImageGenStream, ImagesResponse, }, - Client, + Client, RequestOptions, }; /// Given a prompt and/or an input image, the model will generate a new image. @@ -13,11 +13,15 @@ use crate::{ /// Related guide: [Image generation](https://platform.openai.com/docs/guides/images) pub struct Images<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Images<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates an image given a prompt. @@ -26,7 +30,9 @@ impl<'c, C: Config> Images<'c, C> { &self, request: CreateImageRequest, ) -> Result { - self.client.post("/images/generations", request).await + self.client + .post("/images/generations", request, &self.request_options) + .await } /// Creates an image given a prompt. @@ -54,7 +60,7 @@ impl<'c, C: Config> Images<'c, C> { Ok(self .client - .post_stream("/images/generations", request) + .post_stream("/images/generations", request, &self.request_options) .await) } @@ -69,7 +75,9 @@ impl<'c, C: Config> Images<'c, C> { &self, request: CreateImageEditRequest, ) -> Result { - self.client.post_form("/images/edits", request).await + self.client + .post_form("/images/edits", request, &self.request_options) + .await } /// Creates an edited or extended image given one or more source images and a prompt. @@ -96,7 +104,9 @@ impl<'c, C: Config> Images<'c, C> { } request.stream = Some(true); } - self.client.post_form_stream("/images/edits", request).await + self.client + .post_form_stream("/images/edits", request, &self.request_options) + .await } /// Creates a variation of a given image. This endpoint only supports dall-e-2. 
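The same pattern applies to the fine-tuning list endpoints rewritten in the hunks above (list_paginated, list_events, list_checkpoints, list_checkpoint_permissions): pagination parameters now travel on the group's RequestOptions instead of a dedicated query argument. A sketch, assuming FineTuning gets a RequestOptionsBuilder impl like Files does and that the job id is a placeholder:

use async_openai::{traits::RequestOptionsBuilder, Client};

async fn recent_job_events(job_id: &str) -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new();

    let events = client
        .fine_tuning()
        // The pagination key is illustrative; RequestOptions serializes it
        // into the request URL's query string.
        .query(&[("limit", "10")])?
        .list_events(job_id)
        .await?;

    println!("{} event(s) returned", events.data.len());
    Ok(())
}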
@@ -109,6 +119,8 @@ impl<'c, C: Config> Images<'c, C> { &self, request: CreateImageVariationRequest, ) -> Result { - self.client.post_form("/images/variations", request).await + self.client + .post_form("/images/variations", request, &self.request_options) + .await } } diff --git a/async-openai/src/invites.rs b/async-openai/src/invites.rs index 3291b3b9..be28166d 100644 --- a/async-openai/src/invites.rs +++ b/async-openai/src/invites.rs @@ -1,30 +1,29 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::invites::{Invite, InviteDeleteResponse, InviteListResponse, InviteRequest}, - Client, + Client, RequestOptions, }; /// Invite and manage invitations for an organization. Invited users are automatically added to the Default project. pub struct Invites<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Invites<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Returns a list of invites in the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query("/organization/invites", &query) + .get("/organization/invites", &self.request_options) .await } @@ -32,21 +31,29 @@ impl<'c, C: Config> Invites<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, invite_id: &str) -> Result { self.client - .get(format!("/organization/invites/{invite_id}").as_str()) + .get( + format!("/organization/invites/{invite_id}").as_str(), + &self.request_options, + ) .await } /// Create an invite for a user to the organization. The invite must be accepted by the user before they have access to the organization. #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: InviteRequest) -> Result { - self.client.post("/organization/invites", request).await + self.client + .post("/organization/invites", request, &self.request_options) + .await } /// Delete an invite. If the invite has already been accepted, it cannot be deleted. 
#[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, invite_id: &str) -> Result { self.client - .delete(format!("/organization/invites/{invite_id}").as_str()) + .delete( + format!("/organization/invites/{invite_id}").as_str(), + &self.request_options, + ) .await } } diff --git a/async-openai/src/lib.rs b/async-openai/src/lib.rs index 846eff32..4ea77473 100644 --- a/async-openai/src/lib.rs +++ b/async-openai/src/lib.rs @@ -177,6 +177,7 @@ mod project_users; mod projects; #[cfg(feature = "realtime")] mod realtime; +mod request_options; mod responses; mod runs; mod speech; @@ -231,6 +232,7 @@ pub use project_users::ProjectUsers; pub use projects::Projects; #[cfg(feature = "realtime")] pub use realtime::Realtime; +pub use request_options::RequestOptions; pub use responses::Responses; pub use runs::Runs; pub use speech::Speech; diff --git a/async-openai/src/messages.rs b/async-openai/src/messages.rs index cd59bed4..30c14e1d 100644 --- a/async-openai/src/messages.rs +++ b/async-openai/src/messages.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,7 +5,7 @@ use crate::{ CreateMessageRequest, DeleteMessageResponse, ListMessagesResponse, MessageObject, ModifyMessageRequest, }, - Client, + Client, RequestOptions, }; /// Represents a message within a [thread](https://platform.openai.com/docs/api-reference/threads). @@ -15,6 +13,7 @@ pub struct Messages<'c, C: Config> { /// The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) to create a message for. pub thread_id: String, client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Messages<'c, C> { @@ -22,6 +21,7 @@ impl<'c, C: Config> Messages<'c, C> { Self { client, thread_id: thread_id.into(), + request_options: RequestOptions::new(), } } @@ -32,7 +32,11 @@ impl<'c, C: Config> Messages<'c, C> { request: CreateMessageRequest, ) -> Result { self.client - .post(&format!("/threads/{}/messages", self.thread_id), request) + .post( + &format!("/threads/{}/messages", self.thread_id), + request, + &self.request_options, + ) .await } @@ -40,10 +44,10 @@ impl<'c, C: Config> Messages<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, message_id: &str) -> Result { self.client - .get(&format!( - "/threads/{}/messages/{message_id}", - self.thread_id - )) + .get( + &format!("/threads/{}/messages/{message_id}", self.thread_id), + &self.request_options, + ) .await } @@ -58,28 +62,29 @@ impl<'c, C: Config> Messages<'c, C> { .post( &format!("/threads/{}/messages/{message_id}", self.thread_id), request, + &self.request_options, ) .await } /// Returns a list of messages for a given thread. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query(&format!("/threads/{}/messages", self.thread_id), &query) + .get( + &format!("/threads/{}/messages", self.thread_id), + &self.request_options, + ) .await } #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, message_id: &str) -> Result { self.client - .delete(&format!( - "/threads/{}/messages/{message_id}", - self.thread_id - )) + .delete( + &format!("/threads/{}/messages/{message_id}", self.thread_id), + &self.request_options, + ) .await } } diff --git a/async-openai/src/model.rs b/async-openai/src/model.rs index 8d56bcb4..13f861ce 100644 --- a/async-openai/src/model.rs +++ b/async-openai/src/model.rs @@ -2,7 +2,7 @@ use crate::{ config::Config, error::OpenAIError, types::models::{DeleteModelResponse, ListModelResponse, Model}, - Client, + Client, RequestOptions, }; /// List and describe the various models available in the API. @@ -10,32 +10,38 @@ use crate::{ /// models are available and the differences between them. pub struct Models<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Models<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Lists the currently available models, and provides basic information /// about each one such as the owner and availability. #[crate::byot(R = serde::de::DeserializeOwned)] pub async fn list(&self) -> Result { - self.client.get("/models").await + self.client.get("/models", &self.request_options).await } /// Retrieves a model instance, providing basic information about the model /// such as the owner and permissioning. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, model: &str) -> Result { - self.client.get(format!("/models/{model}").as_str()).await + self.client + .get(format!("/models/{model}").as_str(), &self.request_options) + .await } /// Delete a fine-tuned model. You must have the Owner role in your organization. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, model: &str) -> Result { self.client - .delete(format!("/models/{model}").as_str()) + .delete(format!("/models/{model}").as_str(), &self.request_options) .await } } diff --git a/async-openai/src/moderation.rs b/async-openai/src/moderation.rs index 2e900256..d20f5d0e 100644 --- a/async-openai/src/moderation.rs +++ b/async-openai/src/moderation.rs @@ -2,7 +2,7 @@ use crate::{ config::Config, error::OpenAIError, types::moderations::{CreateModerationRequest, CreateModerationResponse}, - Client, + Client, RequestOptions, }; /// Given text and/or image inputs, classifies if those inputs are potentially harmful across several categories. @@ -10,11 +10,15 @@ use crate::{ /// Related guide: [Moderations](https://platform.openai.com/docs/guides/moderation) pub struct Moderations<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Moderations<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Classifies if text and/or image inputs are potentially harmful. 
Learn @@ -24,6 +28,8 @@ impl<'c, C: Config> Moderations<'c, C> { &self, request: CreateModerationRequest, ) -> Result { - self.client.post("/moderations", request).await + self.client + .post("/moderations", request, &self.request_options) + .await } } diff --git a/async-openai/src/project_api_keys.rs b/async-openai/src/project_api_keys.rs index ce5d56c1..f1143697 100644 --- a/async-openai/src/project_api_keys.rs +++ b/async-openai/src/project_api_keys.rs @@ -1,12 +1,10 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::project_api_keys::{ ProjectApiKey, ProjectApiKeyDeleteResponse, ProjectApiKeyListResponse, }, - Client, + Client, RequestOptions, }; /// Manage API keys for a given project. Supports listing and deleting keys for users. @@ -14,6 +12,7 @@ use crate::{ pub struct ProjectAPIKeys<'c, C: Config> { client: &'c Client, pub project_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ProjectAPIKeys<'c, C> { @@ -21,19 +20,17 @@ impl<'c, C: Config> ProjectAPIKeys<'c, C> { Self { client, project_id: project_id.into(), + request_options: RequestOptions::new(), } } /// Returns a list of API keys in the project. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( format!("/organization/projects/{}/api_keys", self.project_id).as_str(), - &query, + &self.request_options, ) .await } @@ -48,6 +45,7 @@ impl<'c, C: Config> ProjectAPIKeys<'c, C> { self.project_id ) .as_str(), + &self.request_options, ) .await } @@ -62,6 +60,7 @@ impl<'c, C: Config> ProjectAPIKeys<'c, C> { self.project_id ) .as_str(), + &self.request_options, ) .await } diff --git a/async-openai/src/project_certificates.rs b/async-openai/src/project_certificates.rs index a7cc2dd1..ec89e08f 100644 --- a/async-openai/src/project_certificates.rs +++ b/async-openai/src/project_certificates.rs @@ -1,16 +1,15 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::certificates::{ListCertificatesResponse, ToggleCertificatesRequest}, - Client, + Client, RequestOptions, }; /// Manage certificates for a given project. Supports listing, activating, and deactivating certificates. pub struct ProjectCertificates<'c, C: Config> { client: &'c Client, pub project_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ProjectCertificates<'c, C> { @@ -18,18 +17,16 @@ impl<'c, C: Config> ProjectCertificates<'c, C> { Self { client, project_id: project_id.into(), + request_options: RequestOptions::new(), } } /// List all certificates for this project. 
- pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( format!("/organization/projects/{}/certificates", self.project_id).as_str(), - query, + &self.request_options, ) .await } @@ -48,6 +45,7 @@ impl<'c, C: Config> ProjectCertificates<'c, C> { ) .as_str(), request, + &self.request_options, ) .await } @@ -66,6 +64,7 @@ impl<'c, C: Config> ProjectCertificates<'c, C> { ) .as_str(), request, + &self.request_options, ) .await } diff --git a/async-openai/src/project_rate_limits.rs b/async-openai/src/project_rate_limits.rs index 13c7e643..c01e59d0 100644 --- a/async-openai/src/project_rate_limits.rs +++ b/async-openai/src/project_rate_limits.rs @@ -1,18 +1,17 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::project_rate_limits::{ ProjectRateLimit, ProjectRateLimitListResponse, ProjectRateLimitUpdateRequest, }, - Client, + Client, RequestOptions, }; /// Manage rate limits for a given project. Supports listing and updating rate limits per model. pub struct ProjectRateLimits<'c, C: Config> { client: &'c Client, pub project_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ProjectRateLimits<'c, C> { @@ -20,19 +19,17 @@ impl<'c, C: Config> ProjectRateLimits<'c, C> { Self { client, project_id: project_id.into(), + request_options: RequestOptions::new(), } } /// Returns the rate limits per model for a project. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( format!("/organization/projects/{}/rate_limits", self.project_id).as_str(), - &query, + &self.request_options, ) .await } @@ -52,6 +49,7 @@ impl<'c, C: Config> ProjectRateLimits<'c, C> { ) .as_str(), request, + &self.request_options, ) .await } diff --git a/async-openai/src/project_service_accounts.rs b/async-openai/src/project_service_accounts.rs index 53548e21..cacb349c 100644 --- a/async-openai/src/project_service_accounts.rs +++ b/async-openai/src/project_service_accounts.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -8,7 +6,7 @@ use crate::{ ProjectServiceAccountCreateResponse, ProjectServiceAccountDeleteResponse, ProjectServiceAccountListResponse, }, - Client, + Client, RequestOptions, }; /// Manage service accounts within a project. A service account is a bot user that is not @@ -18,6 +16,7 @@ use crate::{ pub struct ProjectServiceAccounts<'c, C: Config> { client: &'c Client, pub project_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ProjectServiceAccounts<'c, C> { @@ -25,23 +24,21 @@ impl<'c, C: Config> ProjectServiceAccounts<'c, C> { Self { client, project_id: project_id.into(), + request_options: RequestOptions::new(), } } /// Returns a list of service accounts in the project. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( format!( "/organization/projects/{}/service_accounts", self.project_id ) .as_str(), - &query, + &self.request_options, ) .await } @@ -60,6 +57,7 @@ impl<'c, C: Config> ProjectServiceAccounts<'c, C> { ) .as_str(), request, + &self.request_options, ) .await } @@ -77,6 +75,7 @@ impl<'c, C: Config> ProjectServiceAccounts<'c, C> { self.project_id ) .as_str(), + &self.request_options, ) .await } @@ -94,6 +93,7 @@ impl<'c, C: Config> ProjectServiceAccounts<'c, C> { self.project_id ) .as_str(), + &self.request_options, ) .await } diff --git a/async-openai/src/project_users.rs b/async-openai/src/project_users.rs index 658031b2..2769daa0 100644 --- a/async-openai/src/project_users.rs +++ b/async-openai/src/project_users.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,7 +5,7 @@ use crate::{ ProjectUser, ProjectUserCreateRequest, ProjectUserDeleteResponse, ProjectUserListResponse, ProjectUserUpdateRequest, }, - Client, + Client, RequestOptions, }; /// Manage users within a project, including adding, updating roles, and removing users. @@ -15,6 +13,7 @@ use crate::{ pub struct ProjectUsers<'c, C: Config> { client: &'c Client, pub project_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> ProjectUsers<'c, C> { @@ -22,19 +21,17 @@ impl<'c, C: Config> ProjectUsers<'c, C> { Self { client, project_id: project_id.into(), + request_options: RequestOptions::new(), } } /// Returns a list of users in the project. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( format!("/organization/projects/{}/users", self.project_id).as_str(), - &query, + &self.request_options, ) .await } @@ -49,6 +46,7 @@ impl<'c, C: Config> ProjectUsers<'c, C> { .post( format!("/organization/projects/{}/users", self.project_id).as_str(), request, + &self.request_options, ) .await } @@ -57,7 +55,10 @@ impl<'c, C: Config> ProjectUsers<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, user_id: &str) -> Result { self.client - .get(format!("/organization/projects/{}/users/{user_id}", self.project_id).as_str()) + .get( + format!("/organization/projects/{}/users/{user_id}", self.project_id).as_str(), + &self.request_options, + ) .await } @@ -72,6 +73,7 @@ impl<'c, C: Config> ProjectUsers<'c, C> { .post( format!("/organization/projects/{}/users/{user_id}", self.project_id).as_str(), request, + &self.request_options, ) .await } @@ -80,7 +82,10 @@ impl<'c, C: Config> ProjectUsers<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, user_id: &str) -> Result { self.client - .delete(format!("/organization/projects/{}/users/{user_id}", self.project_id).as_str()) + .delete( + format!("/organization/projects/{}/users/{user_id}", self.project_id).as_str(), + &self.request_options, + ) .await } } diff --git a/async-openai/src/projects.rs b/async-openai/src/projects.rs index bdbdcc2a..6874775d 100644 --- a/async-openai/src/projects.rs +++ b/async-openai/src/projects.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -9,18 +7,22 @@ use crate::{ types::admin::projects::{ Project, ProjectCreateRequest, ProjectListResponse, ProjectUpdateRequest, }, - Client, ProjectServiceAccounts, ProjectUsers, + Client, ProjectServiceAccounts, ProjectUsers, RequestOptions, }; /// Manage the projects within an organization includes creation, updating, and archiving or projects. /// The Default project cannot be modified or archived. pub struct Projects<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Projects<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } // call [ProjectUsers] group APIs @@ -49,27 +51,29 @@ impl<'c, C: Config> Projects<'c, C> { } /// Returns a list of projects. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query("/organization/projects", &query) + .get("/organization/projects", &self.request_options) .await } /// Create a new project in the organization. Projects can be created and archived, but cannot be deleted. #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: ProjectCreateRequest) -> Result { - self.client.post("/organization/projects", request).await + self.client + .post("/organization/projects", request, &self.request_options) + .await } /// Retrieves a project. 
#[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, project_id: String) -> Result { self.client - .get(format!("/organization/projects/{project_id}").as_str()) + .get( + format!("/organization/projects/{project_id}").as_str(), + &self.request_options, + ) .await } @@ -84,6 +88,7 @@ impl<'c, C: Config> Projects<'c, C> { .post( format!("/organization/projects/{project_id}").as_str(), request, + &self.request_options, ) .await } @@ -95,6 +100,7 @@ impl<'c, C: Config> Projects<'c, C> { .post( format!("/organization/projects/{project_id}/archive").as_str(), (), + &self.request_options, ) .await } diff --git a/async-openai/src/realtime.rs b/async-openai/src/realtime.rs index bb75d217..e3cbe53a 100644 --- a/async-openai/src/realtime.rs +++ b/async-openai/src/realtime.rs @@ -6,18 +6,22 @@ use crate::{ RealtimeCallReferRequest, RealtimeCallRejectRequest, RealtimeCreateClientSecretRequest, RealtimeCreateClientSecretResponse, }, - Client, + Client, RequestOptions, }; /// Realtime API for creating sessions, managing calls, and handling WebRTC connections. /// Related guide: [Realtime API](https://platform.openai.com/docs/guides/realtime) pub struct Realtime<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Realtime<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Create a new Realtime API call over WebRTC and receive the SDP answer needed @@ -30,7 +34,7 @@ impl<'c, C: Config> Realtime<'c, C> { ) -> Result { let (bytes, headers) = self .client - .post_form_raw("/realtime/calls", request) + .post_form_raw("/realtime/calls", request, &self.request_options) .await?; // Extract Location header @@ -57,14 +61,22 @@ impl<'c, C: Config> Realtime<'c, C> { request: RealtimeCallAcceptRequest, ) -> Result<(), OpenAIError> { self.client - .post(&format!("/realtime/calls/{}/accept", call_id), request) + .post( + &format!("/realtime/calls/{}/accept", call_id), + request, + &self.request_options, + ) .await } /// End an active Realtime API call, whether it was initiated over SIP or WebRTC. 
pub async fn hangup_call(&self, call_id: &str) -> Result<(), OpenAIError> { self.client - .post(&format!("/realtime/calls/{}/hangup", call_id), ()) + .post( + &format!("/realtime/calls/{}/hangup", call_id), + (), + &self.request_options, + ) .await } @@ -75,7 +87,11 @@ impl<'c, C: Config> Realtime<'c, C> { request: RealtimeCallReferRequest, ) -> Result<(), OpenAIError> { self.client - .post(&format!("/realtime/calls/{}/refer", call_id), request) + .post( + &format!("/realtime/calls/{}/refer", call_id), + request, + &self.request_options, + ) .await } @@ -89,6 +105,7 @@ impl<'c, C: Config> Realtime<'c, C> { .post( &format!("/realtime/calls/{}/reject", call_id), request.unwrap_or_default(), + &self.request_options, ) .await } @@ -98,6 +115,8 @@ impl<'c, C: Config> Realtime<'c, C> { &self, request: RealtimeCreateClientSecretRequest, ) -> Result { - self.client.post("/realtime/client_secrets", request).await + self.client + .post("/realtime/client_secrets", request, &self.request_options) + .await } } diff --git a/async-openai/src/request_options.rs b/async-openai/src/request_options.rs new file mode 100644 index 00000000..91ce7669 --- /dev/null +++ b/async-openai/src/request_options.rs @@ -0,0 +1,84 @@ +use reqwest::header::HeaderMap; +use serde::Serialize; +use url::Url; + +use crate::{config::OPENAI_API_BASE, error::OpenAIError}; + +#[derive(Clone, Debug, Default)] +pub struct RequestOptions { + query: Option>, + headers: Option, +} + +impl RequestOptions { + pub(crate) fn new() -> Self { + Self { + query: None, + headers: None, + } + } + + pub(crate) fn with_headers(&mut self, headers: HeaderMap) { + // merge with existing headers or update with new headers + if let Some(existing_headers) = &mut self.headers { + existing_headers.extend(headers); + } else { + self.headers = Some(headers); + } + } + + pub(crate) fn with_header(&mut self, key: K, value: V) -> Result<(), OpenAIError> + where + K: reqwest::header::IntoHeaderName, + V: TryInto, + V::Error: Into, + { + let value = value.try_into().map_err(|e| { + OpenAIError::InvalidArgument(format!("Invalid header value: {}", e.into())) + })?; + if let Some(headers) = &mut self.headers { + headers.insert(key, value); + } else { + let mut headers = HeaderMap::new(); + headers.insert(key, value); + self.headers = Some(headers); + } + Ok(()) + } + + pub(crate) fn with_query( + &mut self, + query: &Q, + ) -> Result<(), OpenAIError> { + // Use serde_urlencoded::Serializer directly to handle any serializable type + // similar to how reqwest does it. We create a temporary URL to use query_pairs_mut() + // which allows us to handle any serializable type, not just top-level maps/structs. 
+ let mut url = Url::parse(OPENAI_API_BASE) + .map_err(|e| OpenAIError::InvalidArgument(format!("Failed to create URL: {}", e)))?; + + { + let mut pairs = url.query_pairs_mut(); + let serializer = serde_urlencoded::Serializer::new(&mut pairs); + + query + .serialize(serializer) + .map_err(|e| OpenAIError::InvalidArgument(format!("Invalid query: {}", e)))?; + } + + // Extract query pairs from the URL and append to our vec + let query = self.query.get_or_insert_with(Vec::new); + for (key, value) in url.query_pairs() { + query.push((key.to_string(), value.to_string())); + } + + Ok(()) + } + + pub(crate) fn query(&self) -> &[(String, String)] { + self.query.as_deref().unwrap_or(&[]) + } + + pub(crate) fn headers(&self) -> Option<&HeaderMap> { + self.headers.as_ref() + } +} diff --git a/async-openai/src/responses.rs b/async-openai/src/responses.rs index 223a5b1c..73933db2 100644 --- a/async-openai/src/responses.rs +++ b/async-openai/src/responses.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -7,17 +5,21 @@ use crate::{ CreateResponse, DeleteResponse, Response, ResponseItemList, ResponseStream, TokenCountsBody, TokenCountsResource, }, - Client, + Client, RequestOptions, }; pub struct Responses<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Responses<'c, C> { /// Constructs a new Responses client. pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or @@ -34,7 +36,9 @@ impl<'c, C: Config> Responses<'c, C> { R = serde::de::DeserializeOwned )] pub async fn create(&self, request: CreateResponse) -> Result { - self.client.post("/responses", request).await + self.client + .post("/responses", request, &self.request_options) + .await } /// Creates a model response for the given input with streaming. @@ -60,17 +64,20 @@ impl<'c, C: Config> Responses<'c, C> { } request.stream = Some(true); } - Ok(self.client.post_stream("/responses", request).await) + Ok(self + .client + .post_stream("/responses", request, &self.request_options) + .await) } /// Retrieves a model response with the given ID. - #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn retrieve(&self, response_id: &str, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn retrieve(&self, response_id: &str) -> Result { self.client - .get_with_query(&format!("/responses/{}", response_id), &query) + .get( + &format!("/responses/{}", response_id), + &self.request_options, + ) .await } @@ -78,7 +85,10 @@ impl<'c, C: Config> Responses<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, response_id: &str) -> Result { self.client - .delete(&format!("/responses/{}", response_id)) + .delete( + &format!("/responses/{}", response_id), + &self.request_options, + ) .await } @@ -91,22 +101,22 @@ impl<'c, C: Config> Responses<'c, C> { .post( &format!("/responses/{}/cancel", response_id), serde_json::json!({}), + &self.request_options, ) .await } /// Returns a list of input items for a given response. 
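Because RequestOptions::with_query (new in request_options.rs above) funnels any Serialize value through serde_urlencoded via a temporary Url's query_pairs_mut(), callers are not limited to slices of string pairs. A sketch with a hypothetical paging struct, assuming Responses implements RequestOptionsBuilder and that ResponseItemList exposes a data vector like the other list responses:

use async_openai::{config::OpenAIConfig, traits::RequestOptionsBuilder, Client};
use serde::Serialize;

// Illustrative type only; any Serialize value is accepted by .query().
#[derive(Serialize, Default)]
struct Paging {
    #[serde(skip_serializing_if = "Option::is_none")]
    limit: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    after: Option<String>,
}

async fn page_input_items(
    client: &Client<OpenAIConfig>,
    response_id: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    let page = Paging { limit: Some(20), after: None };

    let items = client
        .responses()
        .query(&page)? // serialized as ?limit=20; `after` is skipped while None
        .list_input_items(response_id)
        .await?;

    println!("{} input item(s)", items.data.len());
    Ok(())
}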
- #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_input_items( + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn list_input_items( &self, response_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + ) -> Result { self.client - .get_with_query(&format!("/responses/{}/input_items", response_id), &query) + .get( + &format!("/responses/{}/input_items", response_id), + &self.request_options, + ) .await } @@ -116,6 +126,8 @@ impl<'c, C: Config> Responses<'c, C> { &self, request: TokenCountsBody, ) -> Result { - self.client.post("/responses/input_tokens", request).await + self.client + .post("/responses/input_tokens", request, &self.request_options) + .await } } diff --git a/async-openai/src/runs.rs b/async-openai/src/runs.rs index 4fad1424..39ca5b23 100644 --- a/async-openai/src/runs.rs +++ b/async-openai/src/runs.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -8,7 +6,7 @@ use crate::{ AssistantEventStream, CreateRunRequest, ListRunsResponse, ModifyRunRequest, RunObject, SubmitToolOutputsRunRequest, }, - Client, + Client, RequestOptions, }; /// Represents an execution run on a thread. @@ -17,6 +15,7 @@ use crate::{ pub struct Runs<'c, C: Config> { pub thread_id: String, client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Runs<'c, C> { @@ -24,6 +23,7 @@ impl<'c, C: Config> Runs<'c, C> { Self { client, thread_id: thread_id.into(), + request_options: RequestOptions::new(), } } @@ -36,7 +36,11 @@ impl<'c, C: Config> Runs<'c, C> { #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: CreateRunRequest) -> Result { self.client - .post(&format!("/threads/{}/runs", self.thread_id), request) + .post( + &format!("/threads/{}/runs", self.thread_id), + request, + &self.request_options, + ) .await } @@ -70,6 +74,7 @@ impl<'c, C: Config> Runs<'c, C> { .post_stream_mapped_raw_events( &format!("/threads/{}/runs", self.thread_id), request, + &self.request_options, TryFrom::try_from, ) .await) @@ -79,7 +84,10 @@ impl<'c, C: Config> Runs<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, run_id: &str) -> Result { self.client - .get(&format!("/threads/{}/runs/{run_id}", self.thread_id)) + .get( + &format!("/threads/{}/runs/{run_id}", self.thread_id), + &self.request_options, + ) .await } @@ -94,18 +102,19 @@ impl<'c, C: Config> Runs<'c, C> { .post( &format!("/threads/{}/runs/{run_id}", self.thread_id), request, + &self.request_options, ) .await } /// Returns a list of runs belonging to a thread. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query(&format!("/threads/{}/runs", self.thread_id), &query) + .get( + &format!("/threads/{}/runs", self.thread_id), + &self.request_options, + ) .await } @@ -123,6 +132,7 @@ impl<'c, C: Config> Runs<'c, C> { self.thread_id ), request, + &self.request_options, ) .await } @@ -160,6 +170,7 @@ impl<'c, C: Config> Runs<'c, C> { self.thread_id ), request, + &self.request_options, TryFrom::try_from, ) .await) @@ -172,6 +183,7 @@ impl<'c, C: Config> Runs<'c, C> { .post( &format!("/threads/{}/runs/{run_id}/cancel", self.thread_id), (), + &self.request_options, ) .await } diff --git a/async-openai/src/speech.rs b/async-openai/src/speech.rs index 4bd160af..ea0bbc74 100644 --- a/async-openai/src/speech.rs +++ b/async-openai/src/speech.rs @@ -2,16 +2,20 @@ use crate::{ config::Config, error::OpenAIError, types::audio::{CreateSpeechRequest, CreateSpeechResponse, SpeechResponseStream}, - Client, + Client, RequestOptions, }; pub struct Speech<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Speech<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Generates audio from the input text. @@ -19,7 +23,10 @@ impl<'c, C: Config> Speech<'c, C> { &self, request: CreateSpeechRequest, ) -> Result { - let (bytes, _headers) = self.client.post_raw("/audio/speech", request).await?; + let (bytes, _headers) = self + .client + .post_raw("/audio/speech", request, &self.request_options) + .await?; Ok(CreateSpeechResponse { bytes }) } @@ -49,6 +56,9 @@ impl<'c, C: Config> Speech<'c, C> { request.stream_format = Some(StreamFormat::SSE); } - Ok(self.client.post_stream("/audio/speech", request).await) + Ok(self + .client + .post_stream("/audio/speech", request, &self.request_options) + .await) } } diff --git a/async-openai/src/steps.rs b/async-openai/src/steps.rs index e82cb977..e0f8cde5 100644 --- a/async-openai/src/steps.rs +++ b/async-openai/src/steps.rs @@ -1,10 +1,8 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::assistants::{ListRunStepsResponse, RunStepObject}, - Client, + Client, RequestOptions, }; /// Represents a step in execution of a run. @@ -12,6 +10,7 @@ pub struct Steps<'c, C: Config> { pub thread_id: String, pub run_id: String, client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Steps<'c, C> { @@ -20,6 +19,7 @@ impl<'c, C: Config> Steps<'c, C> { client, thread_id: thread_id.into(), run_id: run_id.into(), + request_options: RequestOptions::new(), } } @@ -27,23 +27,23 @@ impl<'c, C: Config> Steps<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, step_id: &str) -> Result { self.client - .get(&format!( - "/threads/{}/runs/{}/steps/{step_id}", - self.thread_id, self.run_id - )) + .get( + &format!( + "/threads/{}/runs/{}/steps/{step_id}", + self.thread_id, self.run_id + ), + &self.request_options, + ) .await } /// Returns a list of run steps belonging to a run. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( &format!("/threads/{}/runs/{}/steps", self.thread_id, self.run_id), - &query, + &self.request_options, ) .await } diff --git a/async-openai/src/threads.rs b/async-openai/src/threads.rs index 5a0fe354..d6bfec34 100644 --- a/async-openai/src/threads.rs +++ b/async-openai/src/threads.rs @@ -5,7 +5,7 @@ use crate::{ AssistantEventStream, CreateThreadAndRunRequest, CreateThreadRequest, DeleteThreadResponse, ModifyThreadRequest, RunObject, ThreadObject, }, - Client, Messages, Runs, + Client, Messages, RequestOptions, Runs, }; /// Create threads that assistants can interact with. @@ -13,11 +13,15 @@ use crate::{ /// Related guide: [Assistants](https://platform.openai.com/docs/assistants/overview) pub struct Threads<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Threads<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Call [Messages] group API to manage message in [thread_id] thread. @@ -36,7 +40,9 @@ impl<'c, C: Config> Threads<'c, C> { &self, request: CreateThreadAndRunRequest, ) -> Result { - self.client.post("/threads/runs", request).await + self.client + .post("/threads/runs", request, &self.request_options) + .await } /// Create a thread and run it in one request (streaming). @@ -65,20 +71,29 @@ impl<'c, C: Config> Threads<'c, C> { } Ok(self .client - .post_stream_mapped_raw_events("/threads/runs", request, TryFrom::try_from) + .post_stream_mapped_raw_events( + "/threads/runs", + request, + &self.request_options, + TryFrom::try_from, + ) .await) } /// Create a thread. #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: CreateThreadRequest) -> Result { - self.client.post("/threads", request).await + self.client + .post("/threads", request, &self.request_options) + .await } /// Retrieves a thread. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, thread_id: &str) -> Result { - self.client.get(&format!("/threads/{thread_id}")).await + self.client + .get(&format!("/threads/{thread_id}"), &self.request_options) + .await } /// Modifies a thread. @@ -89,13 +104,19 @@ impl<'c, C: Config> Threads<'c, C> { request: ModifyThreadRequest, ) -> Result { self.client - .post(&format!("/threads/{thread_id}"), request) + .post( + &format!("/threads/{thread_id}"), + request, + &self.request_options, + ) .await } /// Delete a thread. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, thread_id: &str) -> Result { - self.client.delete(&format!("/threads/{thread_id}")).await + self.client + .delete(&format!("/threads/{thread_id}"), &self.request_options) + .await } } diff --git a/async-openai/src/traits.rs b/async-openai/src/traits.rs index 0ae7462b..65ad1441 100644 --- a/async-openai/src/traits.rs +++ b/async-openai/src/traits.rs @@ -1,3 +1,8 @@ +use reqwest::header::HeaderMap; + +use crate::{error::OpenAIError, RequestOptions}; +use serde::Serialize; + pub trait AsyncTryFrom: Sized { /// The type returned in the event of a conversion error. 
type Error; @@ -17,3 +22,35 @@ pub trait EventId { /// Returns the event ID fn event_id(&self) -> &str; } + +/// Trait for types that can build RequestOptions through fluent API +pub trait RequestOptionsBuilder: Sized { + /// Get mutable reference to RequestOptions (for building) + fn options_mut(&mut self) -> &mut RequestOptions; + + /// Get reference to RequestOptions + fn options(&self) -> &RequestOptions; + + /// Add headers to RequestOptions + fn headers(mut self, headers: HeaderMap) -> Self { + self.options_mut().with_headers(headers); + self + } + + /// Add a single header to RequestOptions + fn header(mut self, key: K, value: V) -> Result + where + K: reqwest::header::IntoHeaderName, + V: TryInto, + V::Error: Into, + { + self.options_mut().with_header(key, value)?; + Ok(self) + } + + /// Add query parameters to RequestOptions + fn query(mut self, query: &Q) -> Result { + self.options_mut().with_query(query)?; + Ok(self) + } +} diff --git a/async-openai/src/transcriptions.rs b/async-openai/src/transcriptions.rs index d2cf1f59..7a44d7ef 100644 --- a/async-openai/src/transcriptions.rs +++ b/async-openai/src/transcriptions.rs @@ -8,16 +8,20 @@ use crate::{ CreateTranscriptionResponseJson, CreateTranscriptionResponseVerboseJson, TranscriptionResponseStream, }, - Client, + Client, RequestOptions, }; pub struct Transcriptions<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Transcriptions<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Transcribes audio into the input language. @@ -31,7 +35,7 @@ impl<'c, C: Config> Transcriptions<'c, C> { request: CreateTranscriptionRequest, ) -> Result { self.client - .post_form("/audio/transcriptions", request) + .post_form("/audio/transcriptions", request, &self.request_options) .await } @@ -59,7 +63,7 @@ impl<'c, C: Config> Transcriptions<'c, C> { } self.client - .post_form_stream("/audio/transcriptions", request) + .post_form_stream("/audio/transcriptions", request, &self.request_options) .await } @@ -74,7 +78,7 @@ impl<'c, C: Config> Transcriptions<'c, C> { request: CreateTranscriptionRequest, ) -> Result { self.client - .post_form("/audio/transcriptions", request) + .post_form("/audio/transcriptions", request, &self.request_options) .await } @@ -89,7 +93,7 @@ impl<'c, C: Config> Transcriptions<'c, C> { request: CreateTranscriptionRequest, ) -> Result { self.client - .post_form("/audio/transcriptions", request) + .post_form("/audio/transcriptions", request, &self.request_options) .await } @@ -100,7 +104,7 @@ impl<'c, C: Config> Transcriptions<'c, C> { ) -> Result { let (bytes, _headers) = self .client - .post_form_raw("/audio/transcriptions", request) + .post_form_raw("/audio/transcriptions", request, &self.request_options) .await?; Ok(bytes) } diff --git a/async-openai/src/translations.rs b/async-openai/src/translations.rs index 93260499..636423de 100644 --- a/async-openai/src/translations.rs +++ b/async-openai/src/translations.rs @@ -7,16 +7,20 @@ use crate::{ CreateTranslationRequest, CreateTranslationResponseJson, CreateTranslationResponseVerboseJson, }, - Client, + Client, RequestOptions, }; pub struct Translations<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Translations<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Translates audio into English. 
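The RequestOptionsBuilder trait added to traits.rs above is what gives each API group its fluent headers/header/query methods on top of the pub(crate) request_options field. A sketch of chaining per-request headers before a call, assuming Models implements the trait like Files does; both header names are made up for illustration:

use async_openai::{traits::RequestOptionsBuilder, Client};
use reqwest::header::HeaderMap;

async fn list_models_with_extra_headers() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new();

    let mut extra = HeaderMap::new();
    extra.insert("x-example-trace", "demo".parse()?);

    let models = client
        .models()
        .headers(extra)                   // merge a whole HeaderMap
        .header("x-example-tag", "docs")? // or add one header, fallibly
        .list()
        .await?;

    println!("{} model(s)", models.data.len());
    Ok(())
}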
@@ -29,7 +33,9 @@ impl<'c, C: Config> Translations<'c, C> { &self, request: CreateTranslationRequest, ) -> Result { - self.client.post_form("/audio/translations", request).await + self.client + .post_form("/audio/translations", request, &self.request_options) + .await } /// Translates audio into English. @@ -42,7 +48,9 @@ impl<'c, C: Config> Translations<'c, C> { &self, request: CreateTranslationRequest, ) -> Result { - self.client.post_form("/audio/translations", request).await + self.client + .post_form("/audio/translations", request, &self.request_options) + .await } /// Transcribes audio into the input language. @@ -52,7 +60,7 @@ impl<'c, C: Config> Translations<'c, C> { ) -> Result { let (bytes, _headers) = self .client - .post_form_raw("/audio/translations", request) + .post_form_raw("/audio/translations", request, &self.request_options) .await?; Ok(bytes) } diff --git a/async-openai/src/types/admin/api_keys.rs b/async-openai/src/types/admin/api_keys.rs index 0be8723f..3578591c 100644 --- a/async-openai/src/types/admin/api_keys.rs +++ b/async-openai/src/types/admin/api_keys.rs @@ -2,6 +2,35 @@ use crate::types::OpenAIError; use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Sort order for listing admin API keys. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListAdminApiKeysOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing admin API keys. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListAdminApiKeysQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListAdminApiKeysQuery { + /// Return keys with IDs that come after this ID in the pagination order. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Order results by creation time, ascending or descending. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// Maximum number of keys to return. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, +} + /// Represents an individual Admin API key in an org. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct AdminApiKey { diff --git a/async-openai/src/types/admin/audit_logs.rs b/async-openai/src/types/admin/audit_logs.rs index 1d3b137e..05bfa4d8 100644 --- a/async-openai/src/types/admin/audit_logs.rs +++ b/async-openai/src/types/admin/audit_logs.rs @@ -1,5 +1,44 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Query parameters for listing audit logs. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListAuditLogsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListAuditLogsQuery { + /// Return only events whose `effective_at` (Unix seconds) is in this range. + #[serde(skip_serializing_if = "Option::is_none")] + pub effective_at: Option, + /// Return only events for these projects. + #[serde(skip_serializing_if = "Option::is_none")] + pub project_ids: Option>, + /// Return only events with a `type` in one of these values. + #[serde(skip_serializing_if = "Option::is_none")] + pub event_types: Option>, + /// Return only events performed by these actors. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub actor_ids: Option>, + /// Return only events performed by users with these emails. + #[serde(skip_serializing_if = "Option::is_none")] + pub actor_emails: Option>, + /// Return only events performed on these targets. + #[serde(skip_serializing_if = "Option::is_none")] + pub resource_ids: Option>, + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, +} + /// The event type. #[derive(Debug, Serialize, Deserialize)] pub enum AuditLogEventType { diff --git a/async-openai/src/types/admin/certificates.rs b/async-openai/src/types/admin/certificates.rs index e10774f0..2370f00b 100644 --- a/async-openai/src/types/admin/certificates.rs +++ b/async-openai/src/types/admin/certificates.rs @@ -2,6 +2,77 @@ use crate::error::OpenAIError; use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Sort order for listing organization certificates. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListOrganizationCertificatesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing organization certificates. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListOrganizationCertificatesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListOrganizationCertificatesQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} + +/// Sort order for listing project certificates. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListProjectCertificatesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing project certificates. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListProjectCertificatesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListProjectCertificatesQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} + +/// Query parameters for getting a certificate. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetCertificateQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetCertificateQuery { + /// A list of additional fields to include in the response. Currently the only supported value is `content` to fetch the PEM content of the certificate. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} + /// Represents an individual certificate uploaded to the organization. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct Certificate { diff --git a/async-openai/src/types/admin/invites.rs b/async-openai/src/types/admin/invites.rs index a327af83..1db16692 100644 --- a/async-openai/src/types/admin/invites.rs +++ b/async-openai/src/types/admin/invites.rs @@ -3,6 +3,22 @@ use crate::types::OrganizationRole; use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Query parameters for listing invites. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListInvitesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListInvitesQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} + #[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)] #[serde(rename_all = "lowercase")] pub enum InviteStatus { diff --git a/async-openai/src/types/admin/project_api_keys.rs b/async-openai/src/types/admin/project_api_keys.rs index 2d339d7b..3bf0875a 100644 --- a/async-openai/src/types/admin/project_api_keys.rs +++ b/async-openai/src/types/admin/project_api_keys.rs @@ -1,8 +1,26 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; use serde::{Deserialize, Serialize}; use crate::types::admin::project_service_accounts::ProjectServiceAccount; use crate::types::admin::project_users::ProjectUser; +/// Query parameters for listing project API keys. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListProjectApiKeysQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListProjectApiKeysQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} + /// Represents an individual API key in a project. 
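The typed query structs introduced in these hunks (ListInvitesQuery, ListProjectApiKeysQuery, and the rest) are meant to be built with their derive_builder Args types and handed to .query(). A sketch for invites, assuming Invites implements RequestOptionsBuilder, that these types are reachable under async_openai::types::admin, and that the limit field (whose concrete integer type is not visible in this diff) accepts a small unsigned value via Into:

use async_openai::{traits::RequestOptionsBuilder, types::admin::invites::ListInvitesQueryArgs, Client};

async fn first_page_of_invites() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new();

    // Builder fields are optional; unset ones are skipped during serialization.
    let query = ListInvitesQueryArgs::default().limit(20u8).build()?;

    let invites = client
        .invites()
        .query(&query)?
        .list()
        .await?;

    println!("{} invite(s) returned", invites.data.len());
    Ok(())
}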
#[derive(Debug, Serialize, Deserialize)] pub struct ProjectApiKey { diff --git a/async-openai/src/types/admin/project_rate_limits.rs b/async-openai/src/types/admin/project_rate_limits.rs index fdadb03b..22ca1e86 100644 --- a/async-openai/src/types/admin/project_rate_limits.rs +++ b/async-openai/src/types/admin/project_rate_limits.rs @@ -1,7 +1,25 @@ +use crate::error::OpenAIError; use derive_builder::Builder; use serde::{Deserialize, Serialize}; -use crate::types::OpenAIError; +/// Query parameters for listing project rate limits. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListProjectRateLimitsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListProjectRateLimitsQuery { + /// A limit on the number of objects to be returned. The default is 100. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, +} /// Represents a project rate limit config. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] diff --git a/async-openai/src/types/admin/project_service_accounts.rs b/async-openai/src/types/admin/project_service_accounts.rs index 1ec1ae6a..a1d09f9b 100644 --- a/async-openai/src/types/admin/project_service_accounts.rs +++ b/async-openai/src/types/admin/project_service_accounts.rs @@ -1,7 +1,25 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; use serde::{Deserialize, Serialize}; use crate::types::admin::project_users::ProjectUserRole; +/// Query parameters for listing project service accounts. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListProjectServiceAccountsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListProjectServiceAccountsQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} + /// Represents an individual service account in a project. #[derive(Debug, Serialize, Deserialize)] pub struct ProjectServiceAccount { diff --git a/async-openai/src/types/admin/project_users.rs b/async-openai/src/types/admin/project_users.rs index 51a88199..585b0e1b 100644 --- a/async-openai/src/types/admin/project_users.rs +++ b/async-openai/src/types/admin/project_users.rs @@ -2,6 +2,22 @@ use crate::types::OpenAIError; use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Query parameters for listing project users. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListProjectUsersQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListProjectUsersQuery { + /// A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} + /// Represents an individual user in a project. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct ProjectUser { diff --git a/async-openai/src/types/admin/projects.rs b/async-openai/src/types/admin/projects.rs index 20fdc309..1a8189d7 100644 --- a/async-openai/src/types/admin/projects.rs +++ b/async-openai/src/types/admin/projects.rs @@ -2,6 +2,25 @@ use crate::types::OpenAIError; use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Query parameters for listing projects. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListProjectsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListProjectsQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// If `true` returns all projects including those that have been `archived`. Archived projects are not included by default. + #[serde(skip_serializing_if = "Option::is_none")] + pub include_archived: Option, +} + /// `active` or `archived` #[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)] #[serde(rename_all = "lowercase")] diff --git a/async-openai/src/types/admin/usage.rs b/async-openai/src/types/admin/usage.rs index dfdb8da1..2b2f16df 100644 --- a/async-openai/src/types/admin/usage.rs +++ b/async-openai/src/types/admin/usage.rs @@ -1,29 +1,5 @@ use serde::{Deserialize, Serialize}; -/// Width of each time bucket in response. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum UsageBucketWidth { - #[serde(rename = "1m")] - OneMinute, - #[serde(rename = "1h")] - OneHour, - #[serde(rename = "1d")] - OneDay, -} - -/// Fields to group usage data by. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum UsageGroupBy { - ProjectId, - UserId, - ApiKeyId, - Model, - Batch, - ServiceTier, -} - /// Query parameters for organization usage endpoints. #[derive(Debug, Clone, Serialize, Default)] pub struct UsageQueryParams { @@ -61,6 +37,30 @@ pub struct UsageQueryParams { pub page: Option, } +/// Width of each time bucket in response. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum UsageBucketWidth { + #[serde(rename = "1m")] + OneMinute, + #[serde(rename = "1h")] + OneHour, + #[serde(rename = "1d")] + OneDay, +} + +/// Fields to group usage data by. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum UsageGroupBy { + ProjectId, + UserId, + ApiKeyId, + Model, + Batch, + ServiceTier, +} + /// Response structure for organization usage endpoints. 
#[derive(Debug, Clone, Deserialize)] pub struct UsageResponse { diff --git a/async-openai/src/types/admin/users.rs b/async-openai/src/types/admin/users.rs index 8ed29112..32e3b1a0 100644 --- a/async-openai/src/types/admin/users.rs +++ b/async-openai/src/types/admin/users.rs @@ -3,6 +3,25 @@ use crate::types::OrganizationRole; use derive_builder::Builder; use serde::{Deserialize, Serialize}; +/// Query parameters for listing users. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListUsersQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListUsersQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Filter by the email address of users. + #[serde(skip_serializing_if = "Option::is_none")] + pub emails: Option>, +} + /// Represents an individual `user` within an organization. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct User { diff --git a/async-openai/src/types/assistants/api.rs b/async-openai/src/types/assistants/api.rs new file mode 100644 index 00000000..d422fc48 --- /dev/null +++ b/async-openai/src/types/assistants/api.rs @@ -0,0 +1,118 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing messages. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListMessagesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing messages. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListMessagesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListMessagesQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, + /// Filter messages by the run ID that generated them. + #[serde(skip_serializing_if = "Option::is_none")] + pub run_id: Option, +} + +/// Sort order for listing runs. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListRunsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing runs. 
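Illustrative aside, not part of the diff: the query types introduced above follow the crate's usual `derive_builder` pattern, so a caller constructs them through the generated `*Args` builder. The module path and the placeholder IDs below are assumptions made for the sketch; the elided integer field widths are left unset.

```rust
use async_openai::error::OpenAIError;
// Path assumed from the new `types/assistants/api.rs` module added in this diff.
use async_openai::types::assistants::{ListMessagesOrder, ListMessagesQueryArgs};

fn build_query() -> Result<(), OpenAIError> {
    let query = ListMessagesQueryArgs::default()
        .order(ListMessagesOrder::Desc) // newest messages first
        .after("msg_abc123")            // cursor returned by a previous page
        .run_id("run_abc123")           // only messages produced by this run
        .build()?;                      // `build_fn(error = "OpenAIError")`
    // Fields left unset stay `None` and are omitted when the struct is
    // serialized, thanks to `skip_serializing_if = "Option::is_none"`.
    let _ = query;
    Ok(())
}
```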
+#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListRunsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListRunsQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, +} + +/// Sort order for listing run steps. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListRunStepsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing run steps. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListRunStepsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListRunStepsQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, + /// A list of additional fields to include in the response. Currently the only supported value is `step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result content. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} + +/// Query parameters for retrieving a run step. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetRunStepQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetRunStepQuery { + /// A list of additional fields to include in the response. Currently the only supported value is `step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result content. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} diff --git a/async-openai/src/types/assistants/mod.rs b/async-openai/src/types/assistants/mod.rs index b0debabf..3f645f7a 100644 --- a/async-openai/src/types/assistants/mod.rs +++ b/async-openai/src/types/assistants/mod.rs @@ -1,3 +1,4 @@ +mod api; mod assistant; mod assistant_impls; mod assistant_stream; @@ -6,6 +7,7 @@ mod run; mod step; mod thread; +pub use api::*; pub use assistant::*; pub use assistant_stream::*; pub use message::*; diff --git a/async-openai/src/types/batches/api.rs b/async-openai/src/types/batches/api.rs new file mode 100644 index 00000000..c6c4bdc6 --- /dev/null +++ b/async-openai/src/types/batches/api.rs @@ -0,0 +1,19 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::Serialize; + +/// Query parameters for listing batches. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListBatchesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListBatchesQuery { + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, +} diff --git a/async-openai/src/types/batches/mod.rs b/async-openai/src/types/batches/mod.rs index ab4c11ca..98701977 100644 --- a/async-openai/src/types/batches/mod.rs +++ b/async-openai/src/types/batches/mod.rs @@ -1,3 +1,5 @@ +mod api; mod batch; +pub use api::*; pub use batch::*; diff --git a/async-openai/src/types/chat/api.rs b/async-openai/src/types/chat/api.rs new file mode 100644 index 00000000..a488eccc --- /dev/null +++ b/async-openai/src/types/chat/api.rs @@ -0,0 +1,67 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing chat completions. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListChatCompletionsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing chat completions. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListChatCompletionsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListChatCompletionsQuery { + /// The model used to generate the Chat Completions. + #[serde(skip_serializing_if = "Option::is_none")] + pub model: Option, + /// A list of metadata keys to filter the Chat Completions by. + #[serde(skip_serializing_if = "Option::is_none")] + pub metadata: Option, + /// Identifier for the last chat completion from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of Chat Completions to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for Chat Completions by timestamp. Use `asc` for ascending order or `desc` for descending order. Defaults to `asc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} + +/// Sort order for listing chat completion messages. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum GetChatCompletionMessagesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for getting chat completion messages. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetChatCompletionMessagesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetChatCompletionMessagesQuery { + /// Identifier for the last message from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of messages to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for messages by timestamp. Use `asc` for ascending order or `desc` for descending order. Defaults to `asc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} diff --git a/async-openai/src/types/chat/mod.rs b/async-openai/src/types/chat/mod.rs index 4634f935..f3f50df4 100644 --- a/async-openai/src/types/chat/mod.rs +++ b/async-openai/src/types/chat/mod.rs @@ -1,3 +1,5 @@ +mod api; mod chat_types; +pub use api::*; pub use chat_types::*; diff --git a/async-openai/src/types/chatkit/api.rs b/async-openai/src/types/chatkit/api.rs new file mode 100644 index 00000000..717f9313 --- /dev/null +++ b/async-openai/src/types/chatkit/api.rs @@ -0,0 +1,70 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing ChatKit threads. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListChatKitThreadsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing ChatKit threads. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListChatKitThreadsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListChatKitThreadsQuery { + /// Maximum number of thread items to return. Defaults to 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for results by creation time. Defaults to `desc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// List items created after this thread item ID. Defaults to null for the first page. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// List items created before this thread item ID. Defaults to null for the newest results. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, + /// Filter threads that belong to this user identifier. Defaults to null to return all users. + #[serde(skip_serializing_if = "Option::is_none")] + pub user: Option, +} + +/// Sort order for listing ChatKit thread items. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListChatKitThreadItemsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing ChatKit thread items. 
+#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListChatKitThreadItemsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListChatKitThreadItemsQuery { + /// Maximum number of thread items to return. Defaults to 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for results by creation time. Defaults to `desc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// List items created after this thread item ID. Defaults to null for the first page. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// List items created before this thread item ID. Defaults to null for the newest results. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, +} diff --git a/async-openai/src/types/chatkit/mod.rs b/async-openai/src/types/chatkit/mod.rs index ad660ea1..bf0bbd69 100644 --- a/async-openai/src/types/chatkit/mod.rs +++ b/async-openai/src/types/chatkit/mod.rs @@ -1,5 +1,7 @@ +mod api; mod session; mod thread; +pub use api::*; pub use session::*; pub use thread::*; diff --git a/async-openai/src/types/containers/api.rs b/async-openai/src/types/containers/api.rs new file mode 100644 index 00000000..0b48d8e4 --- /dev/null +++ b/async-openai/src/types/containers/api.rs @@ -0,0 +1,61 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing containers. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListContainersOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing containers. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListContainersQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListContainersQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} + +/// Sort order for listing container files. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListContainerFilesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing container files. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListContainerFilesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListContainerFilesQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} diff --git a/async-openai/src/types/containers/container.rs b/async-openai/src/types/containers/container.rs index 597b4466..b9e19e7a 100644 --- a/async-openai/src/types/containers/container.rs +++ b/async-openai/src/types/containers/container.rs @@ -83,25 +83,6 @@ pub struct DeleteContainerResponse { pub deleted: bool, } -/// Query parameters for listing containers. -#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] -#[builder(name = "ListContainersQueryArgs")] -#[builder(pattern = "mutable")] -#[builder(setter(into, strip_option), default)] -#[builder(derive(Debug))] -#[builder(build_fn(error = "OpenAIError"))] -pub struct ListContainersQuery { - /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. - #[serde(skip_serializing_if = "Option::is_none")] - pub limit: Option, - /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. - #[serde(skip_serializing_if = "Option::is_none")] - pub order: Option, - /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. - #[serde(skip_serializing_if = "Option::is_none")] - pub after: Option, -} - // Container File types /// The container file object represents a file in a container. @@ -155,22 +136,3 @@ pub struct DeleteContainerFileResponse { pub object: String, pub deleted: bool, } - -/// Query parameters for listing container files. -#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] -#[builder(name = "ListContainerFilesQueryArgs")] -#[builder(pattern = "mutable")] -#[builder(setter(into, strip_option), default)] -#[builder(derive(Debug))] -#[builder(build_fn(error = "OpenAIError"))] -pub struct ListContainerFilesQuery { - /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. - #[serde(skip_serializing_if = "Option::is_none")] - pub limit: Option, - /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. - #[serde(skip_serializing_if = "Option::is_none")] - pub order: Option, - /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. - #[serde(skip_serializing_if = "Option::is_none")] - pub after: Option, -} diff --git a/async-openai/src/types/containers/mod.rs b/async-openai/src/types/containers/mod.rs index f6990dfa..b7ab5455 100644 --- a/async-openai/src/types/containers/mod.rs +++ b/async-openai/src/types/containers/mod.rs @@ -1,3 +1,5 @@ +mod api; mod container; +pub use api::*; pub use container::*; diff --git a/async-openai/src/types/evals/api.rs b/async-openai/src/types/evals/api.rs new file mode 100644 index 00000000..1da411af --- /dev/null +++ b/async-openai/src/types/evals/api.rs @@ -0,0 +1,135 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing evals. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListEvalsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Order by field for listing evals. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum ListEvalsOrderBy { + /// Order by creation time + CreatedAt, + /// Order by last updated time + UpdatedAt, +} + +/// Query parameters for listing evals. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListEvalsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListEvalsQuery { + /// Identifier for the last eval from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of evals to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// Evals can be ordered by creation time or last updated time. Use `created_at` for creation time or `updated_at` for last updated time. + #[serde(skip_serializing_if = "Option::is_none")] + pub order_by: Option, +} + +/// Sort order for getting eval runs. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum GetEvalRunsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Status filter for eval runs. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum GetEvalRunsStatus { + /// Queued status + Queued, + /// In progress status + InProgress, + /// Completed status + Completed, + /// Canceled status + Canceled, + /// Failed status + Failed, +} + +/// Query parameters for getting eval runs. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetEvalRunsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetEvalRunsQuery { + /// Identifier for the last run from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of runs to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for descending order. Defaults to `asc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` | `canceled`. + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option, +} + +/// Sort order for getting eval run output items. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum GetEvalRunOutputItemsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Status filter for eval run output items. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum GetEvalRunOutputItemsStatus { + /// Failed status + Fail, + /// Pass status + Pass, +} + +/// Query parameters for getting eval run output items. 
+#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetEvalRunOutputItemsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetEvalRunOutputItemsQuery { + /// Identifier for the last output item from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of output items to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Filter output items by status. Use `failed` to filter by failed output items or `pass` to filter by passed output items. + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option, + /// Sort order for output items by timestamp. Use `asc` for ascending order or `desc` for descending order. Defaults to `asc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} diff --git a/async-openai/src/types/evals/mod.rs b/async-openai/src/types/evals/mod.rs index 635e168e..4dee2981 100644 --- a/async-openai/src/types/evals/mod.rs +++ b/async-openai/src/types/evals/mod.rs @@ -1,3 +1,5 @@ +mod api; mod eval; +pub use api::*; pub use eval::*; diff --git a/async-openai/src/types/files/api.rs b/async-openai/src/types/files/api.rs new file mode 100644 index 00000000..b5f8df64 --- /dev/null +++ b/async-openai/src/types/files/api.rs @@ -0,0 +1,35 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing files. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListFilesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing files. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListFilesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListFilesQuery { + /// Only return files with the given purpose. + #[serde(skip_serializing_if = "Option::is_none")] + pub purpose: Option, + /// A limit on the number of objects to be returned. Limit can range between 1 and 10,000, and the default is 10,000. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} diff --git a/async-openai/src/types/files/mod.rs b/async-openai/src/types/files/mod.rs index 529c201f..add19077 100644 --- a/async-openai/src/types/files/mod.rs +++ b/async-openai/src/types/files/mod.rs @@ -1,3 +1,5 @@ +mod api; mod file; +pub use api::*; pub use file::*; diff --git a/async-openai/src/types/finetuning/api.rs b/async-openai/src/types/finetuning/api.rs new file mode 100644 index 00000000..737f6c30 --- /dev/null +++ b/async-openai/src/types/finetuning/api.rs @@ -0,0 +1,109 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing fine-tuning checkpoint permissions. 
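Another illustrative aside, not part of the diff: because these query structs only derive `Serialize` with `skip_serializing_if`, they flatten to ordinary URL query strings. The sketch assumes the `serde_urlencoded` crate is available and that the types are re-exported under `async_openai::types::files`.

```rust
use async_openai::types::files::{ListFilesOrder, ListFilesQueryArgs};

fn files_query_string() -> Result<(), Box<dyn std::error::Error>> {
    let query = ListFilesQueryArgs::default()
        .purpose("assistants")
        .order(ListFilesOrder::Desc)
        .build()?;
    // Unset fields are skipped, and the enum variant serializes through
    // `rename_all = "lowercase"`.
    assert_eq!(
        serde_urlencoded::to_string(&query)?,
        "purpose=assistants&order=desc"
    );
    Ok(())
}
```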
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListFineTuningCheckpointPermissionsOrder { + /// Ascending order + Ascending, + /// Descending order + Descending, +} + +/// Query parameters for listing fine-tuning checkpoint permissions. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListFineTuningCheckpointPermissionsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListFineTuningCheckpointPermissionsQuery { + /// The ID of the project to get permissions for. + #[serde(skip_serializing_if = "Option::is_none")] + pub project_id: Option, + /// Identifier for the last permission ID from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of permissions to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// The order in which to retrieve permissions. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} + +/// Query parameters for listing fine-tuning jobs. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListPaginatedFineTuningJobsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListPaginatedFineTuningJobsQuery { + /// Identifier for the last job from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of fine-tuning jobs to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Optional metadata filter. To filter, use the syntax `metadata[k]=v`. Alternatively, set `metadata=null` to indicate no metadata. + #[serde(skip_serializing_if = "Option::is_none")] + pub metadata: Option, +} + +/// Sort order for listing fine-tuning job checkpoints. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListFineTuningJobCheckpointsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing fine-tuning job checkpoints. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListFineTuningJobCheckpointsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListFineTuningJobCheckpointsQuery { + /// Identifier for the last checkpoint from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of checkpoints to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, +} + +/// Sort order for listing fine-tuning events. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListFineTuningEventsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing fine-tuning events. 
+#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListFineTuningEventsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListFineTuningEventsQuery { + /// Identifier for the last event from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Number of events to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order for events by timestamp. Use `asc` for ascending order or `desc` for descending order. Defaults to `asc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, +} diff --git a/async-openai/src/types/finetuning/mod.rs b/async-openai/src/types/finetuning/mod.rs index 9e4375ee..f049ec82 100644 --- a/async-openai/src/types/finetuning/mod.rs +++ b/async-openai/src/types/finetuning/mod.rs @@ -1,3 +1,5 @@ +mod api; mod fine_tuning; +pub use api::*; pub use fine_tuning::*; diff --git a/async-openai/src/types/impls.rs b/async-openai/src/types/impls.rs index b868d136..868e227a 100644 --- a/async-openai/src/types/impls.rs +++ b/async-openai/src/types/impls.rs @@ -1371,3 +1371,119 @@ impl From<&str> for EasyInputContent { Self::Text(value.to_owned()) } } + +// request builder impls macro + +/// Macro to implement `RequestOptionsBuilder` for wrapper types containing `RequestOptions` +macro_rules! impl_request_options_builder { + ($type:ident) => { + impl<'c, C: crate::config::Config> crate::traits::RequestOptionsBuilder for $type<'c, C> { + fn options_mut(&mut self) -> &mut crate::RequestOptions { + &mut self.request_options + } + + fn options(&self) -> &crate::RequestOptions { + &self.request_options + } + } + }; +} + +use crate::{ + admin_api_keys::AdminAPIKeys, + assistants::Assistants, + audio::Audio, + audit_logs::AuditLogs, + batches::Batches, + certificates::Certificates, + chat::Chat, + chatkit::{Chatkit, ChatkitSessions, ChatkitThreads}, + completion::Completions, + container_files::ContainerFiles, + containers::Containers, + conversation_items::ConversationItems, + conversations::Conversations, + embedding::Embeddings, + eval_run_output_items::EvalRunOutputItems, + eval_runs::EvalRuns, + evals::Evals, + file::Files, + fine_tuning::FineTuning, + image::Images, + invites::Invites, + messages::Messages, + model::Models, + moderation::Moderations, + project_api_keys::ProjectAPIKeys, + project_certificates::ProjectCertificates, + project_rate_limits::ProjectRateLimits, + project_service_accounts::ProjectServiceAccounts, + project_users::ProjectUsers, + projects::Projects, + responses::Responses, + runs::Runs, + speech::Speech, + steps::Steps, + threads::Threads, + transcriptions::Transcriptions, + translations::Translations, + uploads::Uploads, + usage::Usage, + users::Users, + vector_store_file_batches::VectorStoreFileBatches, + vector_store_files::VectorStoreFiles, + vector_stores::VectorStores, + video::Videos, +}; + +#[cfg(feature = "realtime")] +use crate::Realtime; + +impl_request_options_builder!(AdminAPIKeys); +impl_request_options_builder!(Assistants); +impl_request_options_builder!(Audio); +impl_request_options_builder!(AuditLogs); +impl_request_options_builder!(Batches); +impl_request_options_builder!(Certificates); +impl_request_options_builder!(Chat); +impl_request_options_builder!(Chatkit); +impl_request_options_builder!(ChatkitSessions); +impl_request_options_builder!(ChatkitThreads); 
+impl_request_options_builder!(Completions); +impl_request_options_builder!(ContainerFiles); +impl_request_options_builder!(Containers); +impl_request_options_builder!(ConversationItems); +impl_request_options_builder!(Conversations); +impl_request_options_builder!(Embeddings); +impl_request_options_builder!(Evals); +impl_request_options_builder!(EvalRunOutputItems); +impl_request_options_builder!(EvalRuns); +impl_request_options_builder!(Files); +impl_request_options_builder!(FineTuning); +impl_request_options_builder!(Images); +impl_request_options_builder!(Invites); +impl_request_options_builder!(Messages); +impl_request_options_builder!(Models); +impl_request_options_builder!(Moderations); +impl_request_options_builder!(Projects); +impl_request_options_builder!(ProjectUsers); +impl_request_options_builder!(ProjectServiceAccounts); +impl_request_options_builder!(ProjectAPIKeys); +impl_request_options_builder!(ProjectRateLimits); +impl_request_options_builder!(ProjectCertificates); +#[cfg(feature = "realtime")] +impl_request_options_builder!(Realtime); +impl_request_options_builder!(Responses); +impl_request_options_builder!(Runs); +impl_request_options_builder!(Speech); +impl_request_options_builder!(Steps); +impl_request_options_builder!(Threads); +impl_request_options_builder!(Transcriptions); +impl_request_options_builder!(Translations); +impl_request_options_builder!(Uploads); +impl_request_options_builder!(Usage); +impl_request_options_builder!(Users); +impl_request_options_builder!(VectorStoreFileBatches); +impl_request_options_builder!(VectorStoreFiles); +impl_request_options_builder!(VectorStores); +impl_request_options_builder!(Videos); diff --git a/async-openai/src/types/responses/api.rs b/async-openai/src/types/responses/api.rs new file mode 100644 index 00000000..4bb3aa93 --- /dev/null +++ b/async-openai/src/types/responses/api.rs @@ -0,0 +1,107 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +use super::conversation::{IncludeParam, ListOrder}; + +/// Query parameters for listing conversation items. +#[derive(Clone, Serialize, Default, Debug, Deserialize, Builder, PartialEq)] +#[builder(name = "ListConversationItemsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListConversationItemsQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// The order to return the input items in. Default is `desc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// An item ID to list items after, used in pagination. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Specify additional output data to include in the model response. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} + +/// Sort order for listing input items. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListInputItemsOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for getting a response. 
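A usage sketch for the trait wired up by the `impl_request_options_builder!` invocations above (illustrative only; the `traits` re-export path is an assumption, while the `chat()` accessor is the crate's existing entry point for that API group):

```rust
use async_openai::{traits::RequestOptionsBuilder, Client};

fn main() {
    let client = Client::new();
    let mut chat = client.chat();
    // Every API group handle now owns a `RequestOptions`; the trait exposes it
    // so callers can customize the request before it is issued.
    let _options = chat.options_mut();
}
```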
+#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetResponseQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetResponseQuery { + /// Additional fields to include in the response. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, + /// If set to true, the model response data will be streamed to the client as it is generated using server-sent events. + #[serde(skip_serializing_if = "Option::is_none")] + pub stream: Option, + /// The sequence number of the event after which to start streaming. + #[serde(skip_serializing_if = "Option::is_none")] + pub starting_after: Option, + /// When true, stream obfuscation will be enabled. + #[serde(skip_serializing_if = "Option::is_none")] + pub include_obfuscation: Option, +} + +/// Query parameters for listing input items. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListInputItemsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListInputItemsQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// The order to return the input items in. Default is `desc`. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// An item ID to list items after, used in pagination. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// Additional fields to include in the response. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} + +/// Query parameters for getting a conversation item. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "GetConversationItemQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct GetConversationItemQuery { + /// Additional fields to include in the response. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} + +/// Query parameters for creating conversation items. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "CreateConversationItemsQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct CreateConversationItemsQuery { + /// Additional fields to include in the response. + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} diff --git a/async-openai/src/types/responses/conversation.rs b/async-openai/src/types/responses/conversation.rs index 5ee65020..94d23545 100644 --- a/async-openai/src/types/responses/conversation.rs +++ b/async-openai/src/types/responses/conversation.rs @@ -213,31 +213,6 @@ pub enum IncludeParam { ReasoningEncryptedContent, } -/// Query parameters for listing conversation items. 
-#[derive(Clone, Serialize, Default, Debug, Deserialize, Builder, PartialEq)] -#[builder(name = "ListConversationItemsQueryArgs")] -#[builder(pattern = "mutable")] -#[builder(setter(into, strip_option), default)] -#[builder(derive(Debug))] -#[builder(build_fn(error = "OpenAIError"))] -pub struct ListConversationItemsQuery { - /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. - #[serde(skip_serializing_if = "Option::is_none")] - pub limit: Option, - - /// The order to return the input items in. Default is `desc`. - #[serde(skip_serializing_if = "Option::is_none")] - pub order: Option, - - /// An item ID to list items after, used in pagination. - #[serde(skip_serializing_if = "Option::is_none")] - pub after: Option, - - /// Specify additional output data to include in the model response. - #[serde(skip_serializing_if = "Option::is_none")] - pub include: Option>, -} - /// The order to return items in. #[derive(Clone, Serialize, Debug, Deserialize, PartialEq)] #[serde(rename_all = "lowercase")] diff --git a/async-openai/src/types/responses/mod.rs b/async-openai/src/types/responses/mod.rs index 51f2d733..65003d87 100644 --- a/async-openai/src/types/responses/mod.rs +++ b/async-openai/src/types/responses/mod.rs @@ -1,7 +1,9 @@ +mod api; mod conversation; mod response; mod stream; +pub use api::*; pub use conversation::*; pub use response::*; pub use stream::*; diff --git a/async-openai/src/types/vectorstores/api.rs b/async-openai/src/types/vectorstores/api.rs new file mode 100644 index 00000000..7e24a8fb --- /dev/null +++ b/async-openai/src/types/vectorstores/api.rs @@ -0,0 +1,133 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing vector stores. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListVectorStoresOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing vector stores. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListVectorStoresQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListVectorStoresQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, +} + +/// Sort order for listing files in vector store batch. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListFilesInVectorStoreBatchOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Filter by file status for files in vector store batch. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum ListFilesInVectorStoreBatchFilter { + /// In progress status + InProgress, + /// Completed status + Completed, + /// Failed status + Failed, + /// Cancelled status + Cancelled, +} + +/// Query parameters for listing files in vector store batch. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListFilesInVectorStoreBatchQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListFilesInVectorStoreBatchQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, + /// Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + #[serde(skip_serializing_if = "Option::is_none")] + pub filter: Option, +} + +/// Sort order for listing vector store files. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListVectorStoreFilesOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Filter by file status for vector store files. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "snake_case")] +pub enum ListVectorStoreFilesFilter { + /// In progress status + InProgress, + /// Completed status + Completed, + /// Failed status + Failed, + /// Cancelled status + Cancelled, +} + +/// Query parameters for listing vector store files. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListVectorStoreFilesQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListVectorStoreFilesQuery { + /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// A cursor for use in pagination. `after` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, + /// A cursor for use in pagination. `before` is an object ID that defines your place in the list. + #[serde(skip_serializing_if = "Option::is_none")] + pub before: Option, + /// Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub filter: Option, +} diff --git a/async-openai/src/types/vectorstores/mod.rs b/async-openai/src/types/vectorstores/mod.rs index e38e391f..8dda824f 100644 --- a/async-openai/src/types/vectorstores/mod.rs +++ b/async-openai/src/types/vectorstores/mod.rs @@ -1,3 +1,5 @@ +mod api; mod vector_store; +pub use api::*; pub use vector_store::*; diff --git a/async-openai/src/types/videos/api.rs b/async-openai/src/types/videos/api.rs new file mode 100644 index 00000000..5ef570b1 --- /dev/null +++ b/async-openai/src/types/videos/api.rs @@ -0,0 +1,32 @@ +use crate::error::OpenAIError; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; + +/// Sort order for listing videos. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum ListVideosOrder { + /// Ascending order + Asc, + /// Descending order + Desc, +} + +/// Query parameters for listing videos. +#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq)] +#[builder(name = "ListVideosQueryArgs")] +#[builder(pattern = "mutable")] +#[builder(setter(into, strip_option), default)] +#[builder(derive(Debug))] +#[builder(build_fn(error = "OpenAIError"))] +pub struct ListVideosQuery { + /// Number of items to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// Sort order of results by timestamp. Use `asc` for ascending order or `desc` for descending order. + #[serde(skip_serializing_if = "Option::is_none")] + pub order: Option, + /// Identifier for the last item from the previous pagination request. + #[serde(skip_serializing_if = "Option::is_none")] + pub after: Option, +} diff --git a/async-openai/src/types/videos/mod.rs b/async-openai/src/types/videos/mod.rs index a361cb4e..7bcc8a28 100644 --- a/async-openai/src/types/videos/mod.rs +++ b/async-openai/src/types/videos/mod.rs @@ -1,3 +1,5 @@ +mod api; mod video; +pub use api::*; pub use video::*; diff --git a/async-openai/src/uploads.rs b/async-openai/src/uploads.rs index 8a3ef7f3..4000a961 100644 --- a/async-openai/src/uploads.rs +++ b/async-openai/src/uploads.rs @@ -4,17 +4,21 @@ use crate::{ types::uploads::{ AddUploadPartRequest, CompleteUploadRequest, CreateUploadRequest, Upload, UploadPart, }, - Client, + Client, RequestOptions, }; /// Allows you to upload large files in multiple parts. pub struct Uploads<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Uploads<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Creates an intermediate [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object that @@ -32,7 +36,9 @@ impl<'c, C: Config> Uploads<'c, C> { /// [creating a File](https://platform.openai.com/docs/api-reference/files/create). 
#[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] pub async fn create(&self, request: CreateUploadRequest) -> Result { - self.client.post("/uploads", request).await + self.client + .post("/uploads", request, &self.request_options) + .await } /// Adds a [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an @@ -54,7 +60,11 @@ impl<'c, C: Config> Uploads<'c, C> { request: AddUploadPartRequest, ) -> Result { self.client - .post_form(&format!("/uploads/{upload_id}/parts"), request) + .post_form( + &format!("/uploads/{upload_id}/parts"), + request, + &self.request_options, + ) .await } @@ -75,7 +85,11 @@ impl<'c, C: Config> Uploads<'c, C> { request: CompleteUploadRequest, ) -> Result { self.client - .post(&format!("/uploads/{upload_id}/complete"), request) + .post( + &format!("/uploads/{upload_id}/complete"), + request, + &self.request_options, + ) .await } @@ -86,6 +100,7 @@ impl<'c, C: Config> Uploads<'c, C> { .post( &format!("/uploads/{upload_id}/cancel"), serde_json::json!({}), + &self.request_options, ) .await } diff --git a/async-openai/src/usage.rs b/async-openai/src/usage.rs index e6737c61..9e31af29 100644 --- a/async-openai/src/usage.rs +++ b/async-openai/src/usage.rs @@ -1,117 +1,97 @@ -use serde::Serialize; - -use crate::{config::Config, error::OpenAIError, types::admin::usage::UsageResponse, Client}; +use crate::{ + config::Config, error::OpenAIError, types::admin::usage::UsageResponse, Client, RequestOptions, +}; /// Manage organization usage data. Get usage details for various API endpoints including /// completions, embeddings, images, audio, moderations, vector stores, and code interpreter sessions. pub struct Usage<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Usage<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Get audio speeches usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn audio_speeches(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn audio_speeches(&self) -> Result { self.client - .get_with_query("/organization/usage/audio_speeches", &query) + .get("/organization/usage/audio_speeches", &self.request_options) .await } /// Get audio transcriptions usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn audio_transcriptions(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn audio_transcriptions(&self) -> Result { self.client - .get_with_query("/organization/usage/audio_transcriptions", &query) + .get( + "/organization/usage/audio_transcriptions", + &self.request_options, + ) .await } /// Get code interpreter sessions usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn code_interpreter_sessions( - &self, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn code_interpreter_sessions(&self) -> Result { self.client - .get_with_query("/organization/usage/code_interpreter_sessions", &query) + .get( + "/organization/usage/code_interpreter_sessions", + &self.request_options, + ) .await } /// Get completions usage details for the organization. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn completions(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn completions(&self) -> Result { self.client - .get_with_query("/organization/usage/completions", &query) + .get("/organization/usage/completions", &self.request_options) .await } /// Get embeddings usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn embeddings(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn embeddings(&self) -> Result { self.client - .get_with_query("/organization/usage/embeddings", &query) + .get("/organization/usage/embeddings", &self.request_options) .await } /// Get images usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn images(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn images(&self) -> Result { self.client - .get_with_query("/organization/usage/images", &query) + .get("/organization/usage/images", &self.request_options) .await } /// Get moderations usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn moderations(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn moderations(&self) -> Result { self.client - .get_with_query("/organization/usage/moderations", &query) + .get("/organization/usage/moderations", &self.request_options) .await } /// Get vector stores usage details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn vector_stores(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn vector_stores(&self) -> Result { self.client - .get_with_query("/organization/usage/vector_stores", &query) + .get("/organization/usage/vector_stores", &self.request_options) .await } /// Get costs details for the organization. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn costs(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn costs(&self) -> Result { self.client - .get_with_query("/organization/costs", &query) + .get("/organization/costs", &self.request_options) .await } } diff --git a/async-openai/src/users.rs b/async-openai/src/users.rs index a958acb3..66fba4aa 100644 --- a/async-openai/src/users.rs +++ b/async-openai/src/users.rs @@ -1,30 +1,29 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::admin::users::{User, UserDeleteResponse, UserListResponse, UserRoleUpdateRequest}, - Client, + Client, RequestOptions, }; /// Manage users and their role in an organization. Users will be automatically added to the Default project. pub struct Users<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Users<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Lists all of the users in the organization. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query("/organization/users", &query) + .get("/organization/users", &self.request_options) .await } @@ -36,7 +35,11 @@ impl<'c, C: Config> Users<'c, C> { request: UserRoleUpdateRequest, ) -> Result { self.client - .post(format!("/organization/users/{user_id}").as_str(), request) + .post( + format!("/organization/users/{user_id}").as_str(), + request, + &self.request_options, + ) .await } @@ -44,7 +47,10 @@ impl<'c, C: Config> Users<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, user_id: &str) -> Result { self.client - .get(format!("/organization/users/{user_id}").as_str()) + .get( + format!("/organization/users/{user_id}").as_str(), + &self.request_options, + ) .await } @@ -52,7 +58,10 @@ impl<'c, C: Config> Users<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, user_id: &str) -> Result { self.client - .delete(format!("/organizations/users/{user_id}").as_str()) + .delete( + format!("/organizations/users/{user_id}").as_str(), + &self.request_options, + ) .await } } diff --git a/async-openai/src/vector_store_file_batches.rs b/async-openai/src/vector_store_file_batches.rs index e7a34a33..72d3ee82 100644 --- a/async-openai/src/vector_store_file_batches.rs +++ b/async-openai/src/vector_store_file_batches.rs @@ -1,12 +1,10 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, types::vectorstores::{ CreateVectorStoreFileBatchRequest, ListVectorStoreFilesResponse, VectorStoreFileBatchObject, }, - Client, + Client, RequestOptions, }; /// Vector store file batches represent operations to add multiple files to a vector store. @@ -15,6 +13,7 @@ use crate::{ pub struct VectorStoreFileBatches<'c, C: Config> { client: &'c Client, pub vector_store_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> VectorStoreFileBatches<'c, C> { @@ -22,6 +21,7 @@ impl<'c, C: Config> VectorStoreFileBatches<'c, C> { Self { client, vector_store_id: vector_store_id.into(), + request_options: RequestOptions::new(), } } @@ -35,6 +35,7 @@ impl<'c, C: Config> VectorStoreFileBatches<'c, C> { .post( &format!("/vector_stores/{}/file_batches", &self.vector_store_id), request, + &self.request_options, ) .await } @@ -46,10 +47,13 @@ impl<'c, C: Config> VectorStoreFileBatches<'c, C> { batch_id: &str, ) -> Result { self.client - .get(&format!( - "/vector_stores/{}/file_batches/{batch_id}", - &self.vector_store_id - )) + .get( + &format!( + "/vector_stores/{}/file_batches/{batch_id}", + &self.vector_store_id + ), + &self.request_options, + ) .await } @@ -63,27 +67,24 @@ impl<'c, C: Config> VectorStoreFileBatches<'c, C> { &self.vector_store_id ), serde_json::json!({}), + &self.request_options, ) .await } /// Returns a list of vector store files in a batch. 
- #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list_files( + #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] + pub async fn list_files( &self, batch_id: &str, - query: &Q, - ) -> Result - where - Q: Serialize + ?Sized, - { + ) -> Result { self.client - .get_with_query( + .get( &format!( "/vector_stores/{}/file_batches/{batch_id}/files", &self.vector_store_id ), - &query, + &self.request_options, ) .await } diff --git a/async-openai/src/vector_store_files.rs b/async-openai/src/vector_store_files.rs index 1b72dc2c..164f7b09 100644 --- a/async-openai/src/vector_store_files.rs +++ b/async-openai/src/vector_store_files.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -8,7 +6,7 @@ use crate::{ UpdateVectorStoreFileAttributesRequest, VectorStoreFileContentResponse, VectorStoreFileObject, }, - Client, + Client, RequestOptions, }; /// Vector store files represent files inside a vector store. @@ -17,6 +15,7 @@ use crate::{ pub struct VectorStoreFiles<'c, C: Config> { client: &'c Client, pub vector_store_id: String, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> VectorStoreFiles<'c, C> { @@ -24,6 +23,7 @@ impl<'c, C: Config> VectorStoreFiles<'c, C> { Self { client, vector_store_id: vector_store_id.into(), + request_options: RequestOptions::new(), } } @@ -37,6 +37,7 @@ impl<'c, C: Config> VectorStoreFiles<'c, C> { .post( &format!("/vector_stores/{}/files", &self.vector_store_id), request, + &self.request_options, ) .await } @@ -45,10 +46,10 @@ impl<'c, C: Config> VectorStoreFiles<'c, C> { #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, file_id: &str) -> Result { self.client - .get(&format!( - "/vector_stores/{}/files/{file_id}", - &self.vector_store_id - )) + .get( + &format!("/vector_stores/{}/files/{file_id}", &self.vector_store_id), + &self.request_options, + ) .await } @@ -59,23 +60,20 @@ impl<'c, C: Config> VectorStoreFiles<'c, C> { file_id: &str, ) -> Result { self.client - .delete(&format!( - "/vector_stores/{}/files/{file_id}", - &self.vector_store_id - )) + .delete( + &format!("/vector_stores/{}/files/{file_id}", &self.vector_store_id), + &self.request_options, + ) .await } /// Returns a list of vector store files. 
- #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { self.client - .get_with_query( + .get( &format!("/vector_stores/{}/files", &self.vector_store_id), - &query, + &self.request_options, ) .await } @@ -91,6 +89,7 @@ impl<'c, C: Config> VectorStoreFiles<'c, C> { .post( &format!("/vector_stores/{}/files/{file_id}", &self.vector_store_id), request, + &self.request_options, ) .await } @@ -102,10 +101,13 @@ impl<'c, C: Config> VectorStoreFiles<'c, C> { file_id: &str, ) -> Result { self.client - .get(&format!( - "/vector_stores/{}/files/{file_id}/content", - &self.vector_store_id - )) + .get( + &format!( + "/vector_stores/{}/files/{file_id}/content", + &self.vector_store_id + ), + &self.request_options, + ) .await } } diff --git a/async-openai/src/vector_stores.rs b/async-openai/src/vector_stores.rs index 53764edd..57d75586 100644 --- a/async-openai/src/vector_stores.rs +++ b/async-openai/src/vector_stores.rs @@ -1,5 +1,3 @@ -use serde::Serialize; - use crate::{ config::Config, error::OpenAIError, @@ -9,16 +7,20 @@ use crate::{ VectorStoreSearchResultsPage, }, vector_store_file_batches::VectorStoreFileBatches, - Client, VectorStoreFiles, + Client, RequestOptions, VectorStoreFiles, }; pub struct VectorStores<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> VectorStores<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// [VectorStoreFiles] API group @@ -37,24 +39,28 @@ impl<'c, C: Config> VectorStores<'c, C> { &self, request: CreateVectorStoreRequest, ) -> Result { - self.client.post("/vector_stores", request).await + self.client + .post("/vector_stores", request, &self.request_options) + .await } /// Retrieves a vector store. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, vector_store_id: &str) -> Result { self.client - .get(&format!("/vector_stores/{vector_store_id}")) + .get( + &format!("/vector_stores/{vector_store_id}"), + &self.request_options, + ) .await } /// Returns a list of vector stores. - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/vector_stores", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client + .get("/vector_stores", &self.request_options) + .await } /// Delete a vector store. 
@@ -64,7 +70,10 @@ impl<'c, C: Config> VectorStores<'c, C> { vector_store_id: &str, ) -> Result { self.client - .delete(&format!("/vector_stores/{vector_store_id}")) + .delete( + &format!("/vector_stores/{vector_store_id}"), + &self.request_options, + ) .await } @@ -76,7 +85,11 @@ impl<'c, C: Config> VectorStores<'c, C> { request: UpdateVectorStoreRequest, ) -> Result { self.client - .post(&format!("/vector_stores/{vector_store_id}"), request) + .post( + &format!("/vector_stores/{vector_store_id}"), + request, + &self.request_options, + ) .await } @@ -88,7 +101,11 @@ impl<'c, C: Config> VectorStores<'c, C> { request: VectorStoreSearchRequest, ) -> Result { self.client - .post(&format!("/vector_stores/{vector_store_id}/search"), request) + .post( + &format!("/vector_stores/{vector_store_id}/search"), + request, + &self.request_options, + ) .await } } diff --git a/async-openai/src/video.rs b/async-openai/src/video.rs index 1c4df63e..fcab4a8c 100644 --- a/async-openai/src/video.rs +++ b/async-openai/src/video.rs @@ -3,22 +3,24 @@ use crate::{ error::OpenAIError, types::videos::{ CreateVideoRequest, ListVideosResponse, RemixVideoRequest, VideoJob, VideoJobMetadata, - VideoVariant, }, - Client, + Client, RequestOptions, }; use bytes::Bytes; -use serde::Serialize; /// Video generation with Sora /// Related guide: [Video generation](https://platform.openai.com/docs/guides/video-generation) pub struct Videos<'c, C: Config> { client: &'c Client, + pub(crate) request_options: RequestOptions, } impl<'c, C: Config> Videos<'c, C> { pub fn new(client: &'c Client) -> Self { - Self { client } + Self { + client, + request_options: RequestOptions::new(), + } } /// Create a video @@ -28,7 +30,9 @@ impl<'c, C: Config> Videos<'c, C> { where_clause = "reqwest::multipart::Form: crate::traits::AsyncTryFrom", )] pub async fn create(&self, request: CreateVideoRequest) -> Result { - self.client.post_form("/videos", request).await + self.client + .post_form("/videos", request, &self.request_options) + .await } /// Create a video remix @@ -39,42 +43,44 @@ impl<'c, C: Config> Videos<'c, C> { request: RemixVideoRequest, ) -> Result { self.client - .post(&format!("/videos/{video_id}/remix"), request) + .post( + &format!("/videos/{video_id}/remix"), + request, + &self.request_options, + ) .await } /// Retrieves a video by its ID. #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn retrieve(&self, video_id: &str) -> Result { - self.client.get(&format!("/videos/{}", video_id)).await + self.client + .get(&format!("/videos/{}", video_id), &self.request_options) + .await } /// Delete a Video #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)] pub async fn delete(&self, video_id: &str) -> Result { - self.client.delete(&format!("/videos/{}", video_id)).await + self.client + .delete(&format!("/videos/{}", video_id), &self.request_options) + .await } /// List Videos - #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)] - pub async fn list(&self, query: &Q) -> Result - where - Q: Serialize + ?Sized, - { - self.client.get_with_query("/videos", &query).await + #[crate::byot(R = serde::de::DeserializeOwned)] + pub async fn list(&self) -> Result { + self.client.get("/videos", &self.request_options).await } - /// Download video content - pub async fn download_content( - &self, - video_id: &str, - variant: VideoVariant, - ) -> Result { + /// Download video content. 
+ /// Variant can be provided as query parameter + pub async fn download_content(&self, video_id: &str) -> Result { let (bytes, _headers) = self .client - .get_raw_with_query( + .get_raw( &format!("/videos/{video_id}/content"), - &[("variant", variant)], + &self.request_options, ) .await?; Ok(bytes) diff --git a/async-openai/tests/bring-your-own-type.rs b/async-openai/tests/bring-your-own-type.rs index b8e8287f..f9628c8e 100644 --- a/async-openai/tests/bring-your-own-type.rs +++ b/async-openai/tests/bring-your-own-type.rs @@ -2,7 +2,7 @@ //! The purpose of this test to make sure that all _byot methods compiles with custom types. use std::pin::Pin; -use async_openai::{error::OpenAIError, Client}; +use async_openai::{error::OpenAIError, traits::RequestOptionsBuilder, Client}; use futures::Stream; use serde_json::{json, Value}; @@ -23,7 +23,12 @@ async fn test_byot_files() { let client = Client::new(); let _r: Result = client.files().create_byot(MyJson(json!({}))).await; - let _r: Result = client.files().list_byot([("limit", "2")]).await; + let _r: Result = client + .files() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.files().retrieve_byot("file_id").await; let _r: Result = client.files().delete_byot("file_id").await; } @@ -35,7 +40,12 @@ async fn test_byot_assistants() { let _r: Result = client.assistants().create_byot(json!({})).await; let _r: Result = client.assistants().retrieve_byot("aid").await; let _r: Result = client.assistants().update_byot("aid", json!({})).await; - let _r: Result = client.assistants().list_byot([("limit", 2)]).await; + let _r: Result = client + .assistants() + .query(&[("limit", 2)]) + .unwrap() + .list_byot() + .await; } #[tokio::test] @@ -125,7 +135,9 @@ async fn test_byot_fine_tunning() { let _r: Result = client.fine_tuning().create_byot(json!({})).await; let _r: Result = client .fine_tuning() - .list_paginated_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_paginated_byot() .await; let _r: Result = client .fine_tuning() @@ -135,11 +147,15 @@ async fn test_byot_fine_tunning() { client.fine_tuning().cancel_byot("fine_tuning_job_id").await; let _r: Result = client .fine_tuning() - .list_events_byot("fine_tuning_job_id", [("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_events_byot("fine_tuning_job_id") .await; let _r: Result = client .fine_tuning() - .list_checkpoints_byot("fine_tuning_job_id", [("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_checkpoints_byot("fine_tuning_job_id") .await; } @@ -190,7 +206,9 @@ async fn test_byot_messages() { let _r: Result = client .threads() .messages("thread_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client .threads() @@ -226,7 +244,9 @@ async fn test_byot_runs() { let _r: Result = client .threads() .runs("thread_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client .threads() @@ -259,7 +279,9 @@ async fn test_byot_run_steps() { .threads() .runs("thread_id") .steps("run_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; } @@ -284,7 +306,9 @@ async fn test_byot_vector_store_files() { let _r: Result = client .vector_stores() .files("vector_store_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; } @@ -309,7 +333,9 @@ async fn test_byot_vector_store_file_batches() { let _r: Result = client 
.vector_stores() .file_batches("vector_store_id") - .list_files_byot("batch_id", [("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_files_byot("batch_id") .await; } @@ -317,7 +343,12 @@ async fn test_byot_vector_store_file_batches() { async fn test_byot_batches() { let client = Client::new(); let _r: Result = client.batches().create_byot(json!({})).await; - let _r: Result = client.batches().list_byot([("limit", "2")]).await; + let _r: Result = client + .batches() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.batches().retrieve_byot("batch_id").await; let _r: Result = client.batches().cancel_byot("batch_id").await; } @@ -325,8 +356,13 @@ async fn test_byot_batches() { #[tokio::test] async fn test_byot_audit_logs() { let client = Client::new(); - let _r: Result = - client.admin().audit_logs().get_byot([("limit", "2")]).await; + let _r: Result = client + .admin() + .audit_logs() + .query(&[("limit", "2")]) + .unwrap() + .get_byot() + .await; } #[tokio::test] @@ -335,15 +371,26 @@ async fn test_byot_invites() { let _r: Result = client.admin().invites().create_byot(json!({})).await; let _r: Result = client.admin().invites().retrieve_byot("invite_id").await; let _r: Result = client.admin().invites().delete_byot("invite_id").await; - let _r: Result = client.admin().invites().list_byot([("limit", "2")]).await; + let _r: Result = client + .admin() + .invites() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; } #[tokio::test] async fn test_byot_projects() { let client = Client::new(); - let _r: Result = - client.admin().projects().list_byot([("limit", "2")]).await; + let _r: Result = client + .admin() + .projects() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.admin().projects().create_byot(json!({})).await; let _r: Result = client.admin().projects().retrieve_byot("project_id").await; @@ -363,7 +410,9 @@ async fn test_byot_project_api_keys() { .admin() .projects() .api_keys("project_id") - .list_byot([("query", "2")]) + .query(&[("query", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client @@ -410,7 +459,9 @@ async fn test_byot_project_service_accounts() { .admin() .projects() .service_accounts("project_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; } @@ -435,7 +486,9 @@ async fn test_byot_project_users() { .admin() .projects() .users("project_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client @@ -464,7 +517,13 @@ async fn test_byot_uploads() { async fn test_byot_users() { let client = Client::new(); - let _r: Result = client.admin().users().list_byot([("limit", "2")]).await; + let _r: Result = client + .admin() + .users() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client .admin() .users() @@ -483,7 +542,12 @@ async fn test_byot_vector_stores() { .vector_stores() .retrieve_byot("vector_store_id") .await; - let _r: Result = client.vector_stores().list_byot([("limit", "2")]).await; + let _r: Result = client + .vector_stores() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.vector_stores().delete_byot("vector_store_id").await; let _r: Result = client @@ -513,13 +577,17 @@ async fn test_byot_responses() { client.responses().create_stream_byot(json!({})).await; let _r: Result = client .responses() - .retrieve_byot("response_id", [("limit", "2")]) + .query(&[("limit", "2")]) + 
.unwrap() + .retrieve_byot("response_id") .await; let _r: Result = client.responses().delete_byot("response_id").await; let _r: Result = client.responses().cancel_byot("response_id").await; let _r: Result = client .responses() - .list_input_items_byot("response_id", [("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_input_items_byot("response_id") .await; let _r: Result = client .responses() @@ -556,7 +624,9 @@ async fn test_byot_conversation_items() { let _r: Result = client .conversations() .items("conversation_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client .conversations() @@ -574,21 +644,60 @@ async fn test_byot_conversation_items() { async fn test_byot_usage() { let client = Client::new(); - let _r: Result = client.usage().audio_speeches_byot([("limit", "2")]).await; let _r: Result = client .usage() - .audio_transcriptions_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .audio_speeches_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .audio_transcriptions_byot() .await; let _r: Result = client .usage() - .code_interpreter_sessions_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .code_interpreter_sessions_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .completions_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .embeddings_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .images_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .moderations_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .vector_stores_byot() + .await; + let _r: Result = client + .usage() + .query(&[("limit", "2")]) + .unwrap() + .costs_byot() .await; - let _r: Result = client.usage().completions_byot([("limit", "2")]).await; - let _r: Result = client.usage().embeddings_byot([("limit", "2")]).await; - let _r: Result = client.usage().images_byot([("limit", "2")]).await; - let _r: Result = client.usage().moderations_byot([("limit", "2")]).await; - let _r: Result = client.usage().vector_stores_byot([("limit", "2")]).await; - let _r: Result = client.usage().costs_byot([("limit", "2")]).await; } #[tokio::test] @@ -598,15 +707,22 @@ async fn test_byot_chatkit() { let _r: Result = client.chatkit().sessions().create_byot(json!({})).await; let _r: Result = client.chatkit().sessions().cancel_byot("session_id").await; - let _r: Result = - client.chatkit().threads().list_byot([("limit", "2")]).await; + let _r: Result = client + .chatkit() + .threads() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.chatkit().threads().retrieve_byot("thread_id").await; let _r: Result = client.chatkit().threads().delete_byot("thread_id").await; let _r: Result = client .chatkit() .threads() - .list_items_byot("thread_id", [("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_items_byot("thread_id") .await; } @@ -615,7 +731,12 @@ async fn test_byot_containers() { let client = Client::new(); let _r: Result = client.containers().create_byot(json!({})).await; - let _r: Result = client.containers().list_byot([("limit", "2")]).await; + let _r: Result = client + .containers() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = 
client.containers().retrieve_byot("container_id").await; let _r: Result = client.containers().delete_byot("container_id").await; } @@ -632,7 +753,9 @@ async fn test_byot_container_files() { let _r: Result = client .containers() .files("container_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client .containers() @@ -650,8 +773,13 @@ async fn test_byot_container_files() { async fn test_byot_admin_api_keys() { let client = Client::new(); - let _r: Result = - client.admin().api_keys().list_byot([("limit", "2")]).await; + let _r: Result = client + .admin() + .api_keys() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.admin().api_keys().retrieve_byot("key_id").await; let _r: Result = client.admin().api_keys().delete_byot("key_id").await; } @@ -663,7 +791,9 @@ async fn test_byot_certificates() { let _r: Result = client .admin() .certificates() - .list_organization_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_organization_byot() .await; let _r: Result = client .admin() @@ -690,7 +820,9 @@ async fn test_byot_project_rate_limits() { .admin() .projects() .rate_limits("project_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client .admin() @@ -704,7 +836,12 @@ async fn test_byot_project_rate_limits() { async fn test_byot_evals() { let client = Client::new(); - let _r: Result = client.evals().list_byot([("limit", "2")]).await; + let _r: Result = client + .evals() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; let _r: Result = client.evals().create_byot(json!({})).await; let _r: Result = client.evals().retrieve_byot("eval_id").await; let _r: Result = client.evals().update_byot("eval_id", json!({})).await; @@ -718,7 +855,9 @@ async fn test_byot_eval_runs() { let _r: Result = client .evals() .runs("eval_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client.evals().runs("eval_id").create_byot(json!({})).await; @@ -736,7 +875,9 @@ async fn test_byot_eval_run_output_items() { .evals() .runs("eval_id") .output_items("run_id") - .list_byot([("limit", "2")]) + .query(&[("limit", "2")]) + .unwrap() + .list_byot() .await; let _r: Result = client .evals() @@ -754,5 +895,10 @@ async fn test_byot_videos() { let _r: Result = client.videos().remix_byot("video_id", json!({})).await; let _r: Result = client.videos().retrieve_byot("video_id").await; let _r: Result = client.videos().delete_byot("video_id").await; - let _r: Result = client.videos().list_byot([("limit", "2")]).await; + let _r: Result = client + .videos() + .query(&[("limit", "2")]) + .unwrap() + .list_byot() + .await; } diff --git a/examples/assistants-code-interpreter/src/main.rs b/examples/assistants-code-interpreter/src/main.rs index 48fe6d7a..58848c84 100644 --- a/examples/assistants-code-interpreter/src/main.rs +++ b/examples/assistants-code-interpreter/src/main.rs @@ -1,6 +1,7 @@ use std::error::Error; use async_openai::{ + traits::RequestOptionsBuilder, types::assistants::{ AssistantToolCodeInterpreterResources, AssistantTools, CreateAssistantRequestArgs, CreateMessageRequestArgs, CreateRunRequest, CreateThreadRequest, MessageContent, @@ -82,7 +83,8 @@ async fn main() -> Result<(), Box> { let messages = client .threads() .messages(&thread.id) - .list(&[("limit", "10")]) + .query(&[("limit", "10")])? 
+ .list() .await?; for message_obj in messages.data { diff --git a/examples/assistants-file-search/src/main.rs b/examples/assistants-file-search/src/main.rs index 3c84f808..0ac1907f 100644 --- a/examples/assistants-file-search/src/main.rs +++ b/examples/assistants-file-search/src/main.rs @@ -1,6 +1,7 @@ use std::error::Error; use async_openai::{ + traits::RequestOptionsBuilder, types::assistants::{ AssistantToolFileSearchResources, AssistantToolsFileSearch, CreateAssistantRequestArgs, CreateMessageRequestArgs, CreateRunRequest, CreateThreadRequest, MessageAttachment, @@ -131,7 +132,8 @@ async fn main() -> Result<(), Box> { let messages = client .threads() .messages(&thread.id) - .list(&[("limit", "10")]) + .query(&[("limit", "10")])? + .list() .await?; for message_obj in messages.data { diff --git a/examples/assistants/src/main.rs b/examples/assistants/src/main.rs index 07bb2731..370937f1 100644 --- a/examples/assistants/src/main.rs +++ b/examples/assistants/src/main.rs @@ -1,4 +1,5 @@ use async_openai::{ + traits::RequestOptionsBuilder, types::assistants::{ CreateAssistantRequestArgs, CreateMessageRequestArgs, CreateRunRequestArgs, CreateThreadRequestArgs, MessageContent, MessageRole, RunStatus, @@ -98,7 +99,12 @@ async fn main() -> Result<(), Box> { // in the thread //retrieve the response from the run - let response = client.threads().messages(&thread.id).list(&query).await?; + let response = client + .threads() + .messages(&thread.id) + .query(&query)? + .list() + .await?; //get the message id from the response let message_id = response.data.first().unwrap().id.clone(); //get the message from the response diff --git a/examples/chat-store/src/main.rs b/examples/chat-store/src/main.rs index ea6e21f6..3de297a7 100644 --- a/examples/chat-store/src/main.rs +++ b/examples/chat-store/src/main.rs @@ -1,4 +1,5 @@ use async_openai::{ + traits::RequestOptionsBuilder, types::chat::{ ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs, @@ -52,7 +53,8 @@ async fn main() -> Result<(), Box> { let chat_completion_messages = client .chat() - .messages(&response.id, &[("limit", 10)]) + .query(&[("limit", 10)])? + .messages(&response.id) .await?; println!("--------------------------------"); @@ -60,7 +62,7 @@ async fn main() -> Result<(), Box> { println!("{:#?}", chat_completion_messages); // list all chat completions - let chat_completions = client.chat().list(&[("limit", 10)]).await?; + let chat_completions = client.chat().query(&[("limit", 10)])?.list().await?; println!("--------------------------------"); println!("Retrieved chat completions:\n"); diff --git a/examples/chat/src/main.rs b/examples/chat/src/main.rs index 0c529bfc..07b3b63c 100644 --- a/examples/chat/src/main.rs +++ b/examples/chat/src/main.rs @@ -1,6 +1,7 @@ use std::error::Error; use async_openai::{ + traits::RequestOptionsBuilder, types::chat::{ ChatCompletionRequestAssistantMessageArgs, ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs, @@ -37,7 +38,11 @@ async fn main() -> Result<(), Box> { println!("{}", serde_json::to_string(&request).unwrap()); - let response = client.chat().create(request).await?; + let response = client + .chat() + .query(&vec![("limit", 10)])? 
+ .create(request) + .await?; println!("\nResponse:\n"); for choice in response.choices { diff --git a/examples/containers/src/main.rs b/examples/containers/src/main.rs index f3c54e81..6a65e799 100644 --- a/examples/containers/src/main.rs +++ b/examples/containers/src/main.rs @@ -1,4 +1,5 @@ use async_openai::{ + traits::RequestOptionsBuilder, types::containers::{ ContainerExpiresAfter, ContainerExpiresAfterAnchor, CreateContainerFileRequest, CreateContainerRequestArgs, @@ -30,7 +31,7 @@ async fn main() -> Result<(), Box> { // List all containers println!("\nListing all containers..."); let query = [("limit", "10")]; - let list_response = client.containers().list(&query).await?; + let list_response = client.containers().query(&query)?.list().await?; println!("Found {} containers", list_response.data.len()); for c in &list_response.data { println!(" - {} ({})", c.name, c.id); @@ -67,7 +68,8 @@ async fn main() -> Result<(), Box> { let files_list = client .containers() .files(&container.id) - .list(&files_query) + .query(&files_query)? + .list() .await?; println!("Found {} files", files_list.data.len()); for f in &files_list.data { diff --git a/examples/conversations/src/main.rs b/examples/conversations/src/main.rs index c62e27f3..88306882 100644 --- a/examples/conversations/src/main.rs +++ b/examples/conversations/src/main.rs @@ -1,4 +1,5 @@ use async_openai::{ + traits::RequestOptionsBuilder, types::responses::{ ConversationItem, CreateConversationItemsRequestArgs, CreateConversationRequestArgs, EasyInputContent, EasyInputMessage, InputItem, ListConversationItemsQuery, MessageType, @@ -77,7 +78,8 @@ async fn main() -> Result<(), Box> { let all_items = client .conversations() .items(&conversation.id) - .list(&query) + .query(&query)? + .list() .await?; println!("Total items retrieved: {}", all_items.data.len()); diff --git a/examples/usage/src/main.rs b/examples/usage/src/main.rs index a9a9b304..a589f16e 100644 --- a/examples/usage/src/main.rs +++ b/examples/usage/src/main.rs @@ -2,6 +2,7 @@ use std::error::Error; use std::time::{SystemTime, UNIX_EPOCH}; use async_openai::{ + traits::RequestOptionsBuilder, types::admin::usage::{UsageQueryParams, UsageResult}, Client, }; @@ -36,7 +37,7 @@ async fn main() -> Result<(), Box> { // Audio Speeches println!("=== Audio Speeches Usage ==="); - match client.usage().audio_speeches(&query).await { + match client.usage().query(&query)?.audio_speeches().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -77,7 +78,7 @@ async fn main() -> Result<(), Box> { // Audio Transcriptions println!("=== Audio Transcriptions Usage ==="); - match client.usage().audio_transcriptions(&query).await { + match client.usage().query(&query)?.audio_transcriptions().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -118,7 +119,12 @@ async fn main() -> Result<(), Box> { // Code Interpreter Sessions println!("=== Code Interpreter Sessions Usage ==="); - match client.usage().code_interpreter_sessions(&query).await { + match client + .usage() + .query(&query)? 
+ .code_interpreter_sessions() + .await + { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -156,7 +162,7 @@ async fn main() -> Result<(), Box> { // Completions println!("=== Completions Usage ==="); - match client.usage().completions(&query).await { + match client.usage().query(&query)?.completions().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -221,7 +227,7 @@ async fn main() -> Result<(), Box> { // Embeddings println!("=== Embeddings Usage ==="); - match client.usage().embeddings(&query).await { + match client.usage().query(&query)?.embeddings().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -262,7 +268,7 @@ async fn main() -> Result<(), Box> { // Images println!("=== Images Usage ==="); - match client.usage().images(&query).await { + match client.usage().query(&query)?.images().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -309,7 +315,7 @@ async fn main() -> Result<(), Box> { // Moderations println!("=== Moderations Usage ==="); - match client.usage().moderations(&query).await { + match client.usage().query(&query)?.moderations().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -350,7 +356,7 @@ async fn main() -> Result<(), Box> { // Vector Stores println!("=== Vector Stores Usage ==="); - match client.usage().vector_stores(&query).await { + match client.usage().query(&query)?.vector_stores().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); for bucket in &response.data { @@ -389,7 +395,7 @@ async fn main() -> Result<(), Box> { // Costs println!("=== Costs ==="); - match client.usage().costs(&query).await { + match client.usage().query(&query)?.costs().await { Ok(response) => { println!("Found {} time buckets", response.data.len()); let mut total_cost = 0.0; diff --git a/examples/video/src/main.rs b/examples/video/src/main.rs index 5f66248e..0c4e8fab 100644 --- a/examples/video/src/main.rs +++ b/examples/video/src/main.rs @@ -1,5 +1,6 @@ use async_openai::{ config::OpenAIConfig, + traits::RequestOptionsBuilder, types::videos::{CreateVideoRequestArgs, VideoJob, VideoSize, VideoVariant}, Client, }; @@ -72,7 +73,7 @@ async fn main() -> Result<(), Box> { let video = create_video(&client).await?; // wait for above video to be "completed" tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; - let videos = client.videos().list(&[("limit", "100")]).await?; + let videos = client.videos().query(&[("limit", "100")])?.list().await?; for video in &videos.data { println!("Video: {:#?}", video); @@ -80,7 +81,8 @@ async fn main() -> Result<(), Box> { if video.status == "completed" { let content = client .videos() - .download_content(&video.id, VideoVariant::Video) + .query(&[("variant", VideoVariant::Video)])? + .download_content(&video.id) .await; if let Ok(content) = content { let output_path = &format!("./data/{}.mp4", video.id);
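
Usage sketch (illustrative, not part of the patch): the per-request query customization exercised throughout this diff comes from the `RequestOptionsBuilder` trait (`async_openai::traits::RequestOptionsBuilder`). The sketch assumes `query()` accepts any `serde::Serialize` value (a slice of key/value pairs or a typed struct such as `ListVideosQuery`) and returns a `Result`, mirroring the updated examples and byot tests; the tokio main wrapper and printed output are only for demonstration, and field access like `videos.data` and `video.status` follows the existing `examples/video` code.

use async_openai::{
    traits::RequestOptionsBuilder,
    types::videos::{ListVideosOrder, ListVideosQueryArgs, VideoVariant},
    Client,
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Reads OPENAI_API_KEY from the environment, as in the other examples.
    let client = Client::new();

    // Ad-hoc query parameters as key/value pairs, attached before calling the endpoint.
    let videos = client.videos().query(&[("limit", "10")])?.list().await?;
    println!("listed {} videos", videos.data.len());

    // The same call with a typed query built via the new ListVideosQueryArgs builder.
    let typed_query = ListVideosQueryArgs::default()
        .order(ListVideosOrder::Desc)
        .build()?;
    let videos = client.videos().query(&typed_query)?.list().await?;

    for video in &videos.data {
        if video.status == "completed" {
            // The download variant is now passed as a query parameter rather than
            // as a dedicated method argument.
            let bytes = client
                .videos()
                .query(&[("variant", VideoVariant::Video)])?
                .download_content(&video.id)
                .await?;
            println!("downloaded {} bytes for {}", bytes.len(), video.id);
        }
    }

    Ok(())
}

This reflects the design change in the patch: query parameters now flow through the per-request `RequestOptions` held by each API group, which is why `list()`, `download_content()`, and the usage/admin list methods no longer take a separate query argument.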