diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b50ee12..127b1ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,7 +59,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: build - args: --all-features --target ${{ matrix.platform.target }} + args: --all-features --target ${{ matrix.platform.target }} --workspace --exclude xtask - name: Tests if: matrix.platform.target != 'wasm32-unknown-unknown' uses: actions-rs/cargo@v1 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f1c26dd..93790a2 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -45,4 +45,4 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: --all-features -- -Drust-2018-idioms -Dwarnings + args: --all-features -- -Drust-2018-idioms -Drust-2024-compatibility -Dwarnings diff --git a/README.md b/README.md index 38cf9a9..a07c726 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ When updating or adding new parameters and endpoints, make changes directly in t Once your changes are merged, you can update this project as follows (you can also run tasks individually): ```bash -cargo xtask fetch code-gen +cargo xtask fetch preprocess code-gen ``` This will: diff --git a/openapi-generator-template/lib.mustache b/openapi-generator-template/lib.mustache index d9c227b..3280d74 100644 --- a/openapi-generator-template/lib.mustache +++ b/openapi-generator-template/lib.mustache @@ -1,7 +1,8 @@ -#![allow(unused_imports)] +#![allow(clippy::derivable_impls)] #![allow(clippy::empty_docs)] #![allow(clippy::needless_return)] #![allow(elided_lifetimes_in_paths)] +#![allow(unused_imports)] pub mod apis; pub mod models; diff --git a/openapi-generator-template/reqwest/api.mustache b/openapi-generator-template/reqwest/api.mustache index e56af93..9734291 100644 --- a/openapi-generator-template/reqwest/api.mustache +++ b/openapi-generator-template/reqwest/api.mustache @@ -12,11 +12,23 @@ use super::{Error, configuration, ContentType}; {{#-first}} /// struct for passing parameters to the method [`{{operationId}}`] #[derive(Clone, Debug)] -pub struct {{{operationIdCamelCase}}}Params { +pub struct {{{operationIdCamelCase}}}Params{{! + Iterate through ALL parameters in the operation. + Only the requestBody has this extension defined, so it will print "". + The other parameters have nothing, so they will print nothing. + This effectively extract the generic parameter from the requestBody + and places it on the struct definition line. +}}{{#allParams}} +{{{vendorExtensions.x-rust-params-generic-parameter}}} +{{/allParams}} { {{/-first}} {{#description}} /// {{{.}}} {{/description}} + {{#vendorExtensions.x-rust-type}} + pub {{{paramName}}}: {{{.}}}, + {{/vendorExtensions.x-rust-type}} + {{^vendorExtensions.x-rust-type}} pub {{{paramName}}}: {{! ### Option Start }}{{^required}}Option<{{/required}}{{#required}}{{#isNullable}}Option<{{/isNullable}}{{/required}}{{! @@ -30,6 +42,7 @@ pub struct {{{operationIdCamelCase}}}Params { }}{{^required}}>{{/required}}{{#required}}{{#isNullable}}>{{/isNullable}}{{/required}}{{! ### Comma for next arguement }}{{^-last}},{{/-last}} + {{/vendorExtensions.x-rust-type}} {{#-last}} } @@ -94,44 +107,10 @@ pub enum {{{operationIdCamelCase}}}Error { {{#vendorExtensions.x-group-parameters}} pub {{#supportAsync}}async {{/supportAsync}}fn {{{operationId}}}{{{vendorExtensions.x-rust-generic-parameter}}}(configuration: &configuration::Configuration{{#allParams}}{{#-first}}, {{! 
### Params -}}params: &{{{operationIdCamelCase}}}Params{{/-first}}{{/allParams}}{{! +}}params: &{{{operationIdCamelCase}}}Params{{#allParams}}{{{vendorExtensions.x-rust-params-generic-parameter}}}{{/allParams}}{{/-first}}{{/allParams}}{{! ### Function return type }}) -> Result<{{#vendorExtensions.x-rust-return-type}}{{{.}}}{{/vendorExtensions.x-rust-return-type}}{{^vendorExtensions.x-rust-return-type}}{{#isResponseFile}}{{#supportAsync}}reqwest::Response{{/supportAsync}}{{^supportAsync}}reqwest::blocking::Response{{/supportAsync}}{{/isResponseFile}}{{^isResponseFile}}{{#supportMultipleResponses}}ResponseContent<{{{operationIdCamelCase}}}Success>{{/supportMultipleResponses}}{{^supportMultipleResponses}}{{^returnType}}(){{/returnType}}{{{returnType}}}{{/supportMultipleResponses}}{{/isResponseFile}}{{/vendorExtensions.x-rust-return-type}}, Error<{{{operationIdCamelCase}}}Error>> { {{/vendorExtensions.x-group-parameters}} -{{^vendorExtensions.x-group-parameters}} -pub {{#supportAsync}}async {{/supportAsync}}fn {{{operationId}}}{{{vendorExtensions.x-rust-generic-parameter}}}(configuration: &configuration::Configuration, {{#allParams}}{{{paramName}}}: {{! -### Option Start -}}{{^required}}Option<{{/required}}{{#required}}{{#isNullable}}Option<{{/isNullable}}{{/required}}{{! -### &str and Vec<&str> -}}{{#isString}}{{#isArray}}Vec<{{/isArray}}{{^isUuid}}&str{{/isUuid}}{{#isArray}}>{{/isArray}}{{/isString}}{{! -### UUIDs -}}{{#isUuid}}{{#isArray}}Vec<{{/isArray}}&str{{#isArray}}>{{/isArray}}{{/isUuid}}{{! -### Models and primative types -}}{{^isString}}{{^isUuid}}{{^isPrimitiveType}}{{^isContainer}}models::{{/isContainer}}{{/isPrimitiveType}}{{{dataType}}}{{/isUuid}}{{/isString}}{{! -### Option End -}}{{^required}}>{{/required}}{{#required}}{{#isNullable}}>{{/isNullable}}{{/required}}{{! -### Comma for next arguement -}}{{^-last}}, {{/-last}}{{/allParams}}{{! -### Function return type -}}) -> Result<{{#vendorExtensions.x-rust-return-type}}{{{vendorExtensions.x-rust-return-type}}}{{/vendorExtensions.x-rust-return-type}}{{^vendorExtensions.x-rust-return-type}}{{! -### Response File Support -}}{{#isResponseFile}}{{#supportAsync}}reqwest::Response{{/supportAsync}}{{^supportAsync}}reqwest::blocking::Response{{/supportAsync}}{{/isResponseFile}}{{! -### Regular Responses -}}{{^isResponseFile}}{{! -### Multi response support -}}{{#supportMultipleResponses}}ResponseContent<{{{operationIdCamelCase}}}Success>{{/supportMultipleResponses}}{{! -### Regular return type -}}{{^supportMultipleResponses}}{{^returnType}}(){{/returnType}}{{{returnType}}}{{{vendorExtensions.x-rust-return-type-generic-parameter}}}{{/supportMultipleResponses}}{{/isResponseFile}}{{/vendorExtensions.x-rust-return-type}}{{! 
-### Error Type -}}, Error<{{{operationIdCamelCase}}}Error>> { - {{#allParams.0}} - // add a prefix to parameters to efficiently prevent name collisions - {{/allParams.0}} - {{#allParams}} - let {{{vendorExtensions.x-rust-param-identifier}}} = {{{paramName}}}; - {{/allParams}} -{{/vendorExtensions.x-group-parameters}} - let uri_str = format!("{}{{{path}}}", configuration.base_path{{#pathParams}}, {{{baseName}}}={{#isString}}crate::apis::urlencode(&{{/isString}}{{{vendorExtensions.x-rust-param-identifier}}}{{^required}}.unwrap(){{/required}}{{#required}}{{#isNullable}}.unwrap(){{/isNullable}}{{/required}}{{#isArray}}.join(",").as_ref(){{/isArray}}{{^isString}}{{^isUuid}}{{^isPrimitiveType}}{{^isContainer}}.to_string(){{/isContainer}}{{/isPrimitiveType}}{{/isUuid}}{{/isString}}{{#isString}}){{/isString}}{{/pathParams}}); let mut req_builder = configuration.client.request(reqwest::Method::{{{httpMethod}}}, &uri_str); @@ -513,4 +492,4 @@ pub {{#supportAsync}}async {{/supportAsync}}fn {{{operationId}}}{{{vendorExtensi } {{/operation}} -{{/operations}} +{{/operations}} \ No newline at end of file diff --git a/preprocessed_openapi.yml b/preprocessed_openapi.yml index 5bbf988..ad50ea7 100644 --- a/preprocessed_openapi.yml +++ b/preprocessed_openapi.yml @@ -348,6 +348,9 @@ paths: description: Can be any key-value pair x-go-type: interface{} required: true + x-rust-params-generic-parameter: + x-rust-type: B + x-rust-generic-parameter: '' delete: tags: - documents @@ -1253,6 +1256,8 @@ paths: description: Can be any key-value pair x-go-type: interface{} required: true + x-rust-params-generic-parameter: + x-rust-type: B responses: '200': description: The document referenced by the ID was updated @@ -1267,6 +1272,7 @@ paths: application/json: schema: $ref: '#/components/schemas/ApiResponse' + x-rust-generic-parameter: '' delete: tags: - documents diff --git a/typesense/src/client/collection/document.rs b/typesense/src/client/collection/document.rs index 1398d60..9a159ec 100644 --- a/typesense/src/client/collection/document.rs +++ b/typesense/src/client/collection/document.rs @@ -4,7 +4,7 @@ //! via a parent `Collection` struct, for example: //! `client.collection::().document("123")` -use crate::{Client, Error, execute_wrapper}; +use crate::{Client, Error, execute_wrapper, traits}; use serde::{Serialize, de::DeserializeOwned}; use typesense_codegen::apis::documents_api; @@ -51,10 +51,32 @@ where let result_value = execute_wrapper!(self, documents_api::get_document, params)?; - // Deserialize the raw JSON value into the user's type T. + // Deserialize the raw JSON value into the user's type D. serde_json::from_value(result_value).map_err(Error::from) } + /// Deletes this individual document from the collection. + /// The deleted document is returned. + /// + /// # Returns + /// A `Result` containing the deleted document deserialized into `D`. + pub async fn delete(&self) -> Result> { + let params = documents_api::DeleteDocumentParams { + collection_name: self.collection_name.to_owned(), + document_id: self.document_id.to_owned(), + }; + + let result_value = execute_wrapper!(self, documents_api::delete_document, params)?; + + // Deserialize the raw JSON value of the deleted document into T. + serde_json::from_value(result_value).map_err(Error::from) + } +} + +impl<'c, 'n, D> Document<'c, 'n, D> +where + D: traits::Document, +{ /// Updates this individual document. The update can be partial. /// The updated full document is returned. 
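
For orientation, a minimal sketch (not part of the patch) of how the reworked single-document handle is meant to be used: `retrieve` and `delete` stay on the plain `serde`-bounded impl block, while `update` now lives behind the `traits::Document` bound so it can accept the derive-generated partial struct. `Book` and `BookPartial` are illustrative names, with `BookPartial` assumed to be the type emitted by `#[derive(Typesense)]`.

```rust
use serde::{Deserialize, Serialize};
use typesense::{Client, Typesense};

#[derive(Typesense, Serialize, Deserialize)]
struct Book {
    id: String,
    title: String,
    pages: i32,
}

async fn demo(client: &Client) -> Result<(), Box<dyn std::error::Error>> {
    // Partial update through the generated `BookPartial`; only the set fields are sent.
    let updated: Book = client
        .collection_named::<Book>("books")
        .document("123")
        .update(&BookPartial { pages: Some(654), ..Default::default() }, None)
        .await?;
    assert_eq!(updated.pages, 654);

    // Delete returns the document in the state it had just before deletion.
    let deleted: Book = client
        .collection_named::<Book>("books")
        .document("123")
        .delete()
        .await?;
    assert_eq!(deleted.id, "123");
    Ok(())
}
```
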
/// @@ -68,9 +90,9 @@ where /// # Example /// ```no_run /// # use serde::{Serialize, Deserialize}; - /// # use typesense::{Client, models}; + /// # use typesense::{Client, Typesense, models}; /// # use reqwest::Url; - /// # #[derive(Serialize, Deserialize)] + /// # #[derive(Typesense, Serialize, Deserialize)] /// # struct Book { id: String, title: String, pages: i32 } /// # /// # async fn run() -> Result<(), Box> { @@ -79,7 +101,7 @@ where /// # .api_key("xyz") /// # .build() /// # .unwrap(); - /// let book_update = serde_json::json!({ "pages": 654 }); + /// let book_update = BookPartial { pages: Some(654), ..Default::default() }; /// /// // Simple update /// let updated_book = client.collection_named::("books").document("123") @@ -97,15 +119,15 @@ where /// # Ok(()) /// # } /// ``` - pub async fn update( + pub async fn update( &self, - partial_document: U, + partial_document: &D::Partial, params: Option, ) -> Result> { let params = documents_api::UpdateDocumentParams { collection_name: self.collection_name.to_owned(), document_id: self.document_id.to_owned(), - body: serde_json::to_value(partial_document)?, + body: partial_document, dirty_values: params.and_then(|d| d.dirty_values), }; @@ -114,21 +136,4 @@ where // Deserialize the raw JSON value of the updated document into T. serde_json::from_value(result_value).map_err(Error::from) } - - /// Deletes this individual document from the collection. - /// The deleted document is returned. - /// - /// # Returns - /// A `Result` containing the deleted document deserialized into `D`. - pub async fn delete(&self) -> Result> { - let params = documents_api::DeleteDocumentParams { - collection_name: self.collection_name.to_owned(), - document_id: self.document_id.to_owned(), - }; - - let result_value = execute_wrapper!(self, documents_api::delete_document, params)?; - - // Deserialize the raw JSON value of the deleted document into T. - serde_json::from_value(result_value).map_err(Error::from) - } } diff --git a/typesense/src/client/collection/documents.rs b/typesense/src/client/collection/documents.rs index c2c5d7d..b394475 100644 --- a/typesense/src/client/collection/documents.rs +++ b/typesense/src/client/collection/documents.rs @@ -7,6 +7,7 @@ use crate::{ Client, Error, execute_wrapper, models::{DocumentIndexParameters, SearchResult}, + traits, }; use serde::{Serialize, de::DeserializeOwned}; use typesense_codegen::{ @@ -63,43 +64,6 @@ where execute_wrapper!(self, documents_api::index_document, params) } - /// Creates a new document in the collection. - /// - /// Fails if a document with the same ID already exists. If the document has an `id` field - /// of type `string`, it will be used as the document's ID. Otherwise, Typesense will - /// auto-generate an ID. The newly indexed document is returned. - /// - /// # Arguments - /// * `document` - A serializable struct or a `serde_json::Value` representing the document to create. - /// * `params` - Optional parameters like `dirty_values`. - pub async fn create( - &self, - document: U, - params: Option, - ) -> Result> { - let doc_value = serde_json::to_value(document)?; - let result_value = self.index(doc_value, "create", params).await?; - serde_json::from_value(result_value).map_err(Error::from) - } - - /// Creates a new document or updates an existing one if an ID match is found. - /// - /// This method requires the full document to be sent. For partial updates, use - /// `collection().document("...").update()`. The indexed document is returned. 
- /// - /// # Arguments - /// * `document` - A serializable struct or a `serde_json::Value` representing the document to upsert. - /// * `params` - Optional parameters like `dirty_values`. - pub async fn upsert( - &self, - document: U, - params: Option, - ) -> Result> { - let doc_value = serde_json::to_value(document)?; - let result_value = self.index(doc_value, "upsert", params).await?; - serde_json::from_value(result_value).map_err(Error::from) - } - // --- Bulk Operation Methods --- /// Imports a batch of documents in JSONL format. @@ -132,7 +96,7 @@ where /// /// # Arguments /// * `params` - An `ExportDocumentsParameters` struct containing options like `filter_by` and `include_fields`. - pub async fn export( + pub async fn export_jsonl( &self, params: ExportDocumentsParameters, ) -> Result> { @@ -164,25 +128,6 @@ where execute_wrapper!(self, documents_api::delete_documents, params) } - /// Updates a batch of documents matching a specific filter condition. - /// - /// # Arguments - /// * `document` - A serializable struct or a `serde_json::Value` containing the fields to update. - /// * `params` - A `UpdateDocumentsParameters` describing the conditions for updating documents. - pub async fn update( - &self, - document: U, - params: UpdateDocumentsParameters, - ) -> Result> - { - let params = documents_api::UpdateDocumentsParams { - collection_name: self.collection_name.to_owned(), - filter_by: params.filter_by, - body: serde_json::to_value(document)?, - }; - execute_wrapper!(self, documents_api::update_documents, params) - } - /// Searches for documents in the collection that match the given criteria. /// The search results will have their `document` field deserialized into type `D`. /// @@ -271,3 +216,64 @@ where execute_wrapper!(self, documents_api::search_collection, search_params) } } + +impl<'c, 'n, D> Documents<'c, 'n, D> +where + D: traits::Document, +{ + /// Creates a new document in the collection. + /// + /// Fails if a document with the same ID already exists. If the document has an `id` field + /// of type `string`, it will be used as the document's ID. Otherwise, Typesense will + /// auto-generate an ID. The newly indexed document is returned. + /// + /// # Arguments + /// * `document` - A document struct to create. + /// * `params` - Optional parameters like `dirty_values`. + pub async fn create( + &self, + document: &D, + params: Option, + ) -> Result> { + let doc_value = serde_json::to_value(document)?; + let result_value = self.index(doc_value, "create", params).await?; + serde_json::from_value(result_value).map_err(Error::from) + } + + /// Creates a new document or updates an existing one if an ID match is found. + /// + /// This method requires the full document to be sent. For partial updates, use + /// `collection().document("...").update()`. The indexed document is returned. + /// + /// # Arguments + /// * `document` - A document struct to upsert. + /// * `params` - Optional parameters like `dirty_values`. + pub async fn upsert( + &self, + document: &D, + params: Option, + ) -> Result> { + let doc_value = serde_json::to_value(document)?; + let result_value = self.index(doc_value, "upsert", params).await?; + serde_json::from_value(result_value).map_err(Error::from) + } + + /// Updates a batch of documents matching a specific filter condition. + /// + /// # Arguments + /// * `document` - A struct containing the fields to update. + /// * `params` - A `UpdateDocumentsParameters` describing the conditions for updating documents. 
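
Continuing the illustrative `Book`/`BookPartial` pair from the sketch above, this is roughly how the relocated collection-level methods are expected to be called after this change; the parameter type paths are an assumption, taken from what the integration tests import via `typesense::models`.

```rust
use typesense::models::{ExportDocumentsParameters, UpdateDocumentsParameters};

async fn demo(client: &typesense::Client, book: &Book) -> Result<(), Box<dyn std::error::Error>> {
    let collection = client.collection_named::<Book>("books");

    // `create` and `upsert` now take the concrete document type by reference.
    let created: Book = collection.documents().create(book, None).await?;
    assert_eq!(created.id, book.id);

    // Bulk update: derive-generated partial struct plus a filter expression.
    let filter = UpdateDocumentsParameters {
        filter_by: Some("pages:>400".to_owned()),
    };
    let response = collection
        .documents()
        .update(&BookPartial { pages: Some(500), ..Default::default() }, filter)
        .await?;
    println!("updated {} documents", response.num_updated);

    // Export still returns raw JSONL, hence the `export_jsonl` rename.
    let jsonl = collection
        .documents()
        .export_jsonl(ExportDocumentsParameters::default())
        .await?;
    println!("{jsonl}");
    Ok(())
}
```
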
+ pub async fn update( + &self, + document: &D::Partial, + params: UpdateDocumentsParameters, + ) -> Result> + { + let params = documents_api::UpdateDocumentsParams { + collection_name: self.collection_name.to_owned(), + filter_by: params.filter_by, + body: document, + }; + execute_wrapper!(self, documents_api::update_documents, params) + } +} diff --git a/typesense/src/traits/document.rs b/typesense/src/traits/document.rs index ace35f2..dd019bd 100644 --- a/typesense/src/traits/document.rs +++ b/typesense/src/traits/document.rs @@ -6,11 +6,16 @@ use crate::models::CollectionSchema; use serde::{Serialize, de::DeserializeOwned}; -/// Trait that should implement every struct that wants to be represented as a Typesense +/// Trait for partial structs +pub trait DocumentPartial: Serialize {} + +/// Trait that every struct should implement that wants to be represented as a Typesense /// Document pub trait Document: DeserializeOwned + Serialize { /// Collection name const COLLECTION_NAME: &'static str; + /// A struct for partial updates + type Partial: DocumentPartial; /// Collection schema associated with the document. fn collection_schema() -> CollectionSchema; diff --git a/typesense/src/traits/mod.rs b/typesense/src/traits/mod.rs index 667128e..bd4aac9 100644 --- a/typesense/src/traits/mod.rs +++ b/typesense/src/traits/mod.rs @@ -4,6 +4,6 @@ mod document; mod field_type; mod multi_search_ext; -pub use document::Document; +pub use document::*; pub use field_type::*; pub use multi_search_ext::MultiSearchResultExt; diff --git a/typesense/tests/client/documents_test.rs b/typesense/tests/client/documents_test.rs index 04040a6..77cf73d 100644 --- a/typesense/tests/client/documents_test.rs +++ b/typesense/tests/client/documents_test.rs @@ -8,11 +8,11 @@ use typesense::models::{ use super::{get_client, new_id}; -async fn run_test_document_lifecycle() { +async fn run_test_schemaless_document_lifecycle() { let client = get_client(); let collection_name = new_id("books"); - // --- 1. Setup: Create a Collection --- + // --- Setup: Create a Collection --- let schema = CollectionSchema { name: collection_name.clone(), fields: vec![ @@ -36,84 +36,64 @@ async fn run_test_document_lifecycle() { ..Default::default() }; - let create_collection_result = client.collections().create(schema).await; - assert!( - create_collection_result.is_ok(), - "Failed to create collection" - ); + let _create_collection_response = client + .collections() + .create(schema) + .await + .expect("Failed to create collection"); let book_1_id = &new_id("document_1"); - let book_1 = json!({ - "id": book_1_id, - "title": "The Hitchhiker's Guide to the Galaxy", - "author": "Douglas Adams", - "publication_year": 1979 - }); - - let book_2 = json!({ - "title": "The Lord of the Rings", - "author": "J.R.R. Tolkien", - "publication_year": 1954 - }); + let book_1 = json!({"id": book_1_id, "title": "The Hitchhiker's Guide to the Galaxy","author": "John","publication_year": 1979}); + let book_2 = + json!({"title": "The Lord of the Rings","author": "John","publication_year": 1954}); + let book_3 = json!({"title": "Book 3","author": "John","publication_year": 2100}); let collection_client = client.collection_schemaless(&collection_name); let documents_client = collection_client.documents(); - // --- 2. 
Create a document (via `documents().create()`) --- - let create_res = documents_client.create(&book_1, None).await; - assert!(create_res.is_ok(), "Failed to create document 1"); + // --- Bulk Import --- + let new_books_jsonl = format!("{}\n{}\n{}", book_1, book_2, book_3); + let import_params = ImportDocumentsParameters { + action: Some(IndexAction::Create), + ..Default::default() + }; + let import_res = documents_client + .import_jsonl(new_books_jsonl, import_params) + .await; - // --- 3. Upsert a document (via `documents().upsert()`) --- - let upsert_res = documents_client.upsert(&book_2, None).await; - assert!(upsert_res.is_ok(), "Failed to upsert document 2"); + assert!( + !import_res.unwrap().contains("success: false"), + "Bulk import failed" + ); - // --- 4. Retrieve a single document (via `document(id).retrieve()`) --- - let retrieve_res = client + // --- Retrieve a single document (via `document(id).retrieve()`) --- + let retrieved_book = client .collection_schemaless(&collection_name) .document(book_1_id) .retrieve() - .await; - assert!(retrieve_res.is_ok(), "Failed to retrieve document 1"); - assert_eq!(retrieve_res.unwrap(), book_1); + .await + .expect("Failed to retrieve document 1"); + assert_eq!(retrieved_book, book_1); - // --- 5. Search for documents --- + // --- Search for documents --- let search_params = SearchParameters::builder() .q("the") .query_by("title") .build(); - let search_res = documents_client.search(search_params).await; - assert!(search_res.is_ok(), "Search failed"); - assert_eq!(search_res.unwrap().found, Some(2)); - - // --- 6. Update a single document --- - let partial_update = json!({ "publication_year": 1980 }); - let update_res = client - .collection_schemaless(&collection_name) - .document(book_1_id) - .update(&partial_update, None) - .await; - assert!(update_res.is_ok(), "Failed to update document 1"); - - // --- 7. Verify the single update --- - let retrieve_after_update_res = client - .collection_schemaless(&collection_name) - .document(book_1_id) - .retrieve() - .await; - let updated_doc = retrieve_after_update_res.unwrap(); - assert_eq!( - updated_doc.get("publication_year").unwrap().as_i64(), - Some(1980) - ); - - // --- 8. Delete a single document --- - let delete_res = client + let search_res = documents_client + .search(search_params) + .await + .expect("Search failed"); + assert_eq!(search_res.found, Some(2)); + + // --- Delete a single document --- + let _deleted_book = client .collection_schemaless(&collection_name) .document(book_1_id) .delete() - .await; - assert!(delete_res.is_ok(), "Failed to delete document 1"); + .await + .expect("Failed to delete document 1"); - // --- 9. Verify single deletion --- + // --- Verify single deletion --- let retrieve_after_delete_res = client .collection_schemaless(&collection_name) .document(book_1_id) @@ -124,76 +104,39 @@ async fn run_test_document_lifecycle() { "Document should not exist after deletion" ); - // --- 10. Bulk Import --- - let new_books_jsonl = format!( - "{}\n{}", - json!({"title": "Foundation", "author": "Isaac Asimov", "publication_year": 1951}), - json!({"title": "Dune", "author": "Frank Herbert", "publication_year": 1965}) - ); - - let import_params = ImportDocumentsParameters { - action: Some(IndexAction::Create), - ..Default::default() - }; - let import_res = documents_client - .import_jsonl(new_books_jsonl, import_params) - .await; - assert!(import_res.is_ok(), "Bulk import failed"); - - // --- 11. 
Verify Import via Search --- - let search_after_import_params = SearchParameters { - q: Some("*".to_owned()), - query_by: Some("title".to_owned()), - ..Default::default() - }; - let search_after_import_res = documents_client.search(search_after_import_params).await; - let search_results = search_after_import_res.unwrap(); - // 1 remaining (book_2) + 2 new imports = 3 - assert_eq!(search_results.found, Some(3)); - - // --- 12. Bulk Update (via `documents().update()`) --- - let bulk_update_params = UpdateDocumentsParameters { - filter_by: Some("publication_year:<1960".to_owned()), - }; - let bulk_update_payload = json!({ "author": "Sci-Fi Pioneer" }); - let bulk_update_res = documents_client - .update(bulk_update_payload, bulk_update_params) - .await; - assert!(bulk_update_res.is_ok(), "Bulk update failed"); - // Should update Lord of the Rings (1954) and Foundation (1951) - assert_eq!(bulk_update_res.unwrap().num_updated, 2); - - // --- 13. Export documents (via `documents().export()`) --- + // --- Export documents (via `documents().export_jsonl()`) --- let export_params = ExportDocumentsParameters { - filter_by: Some("author:\"Sci-Fi Pioneer\"".to_owned()), + filter_by: Some("author:John".to_owned()), ..Default::default() }; - let export_res = documents_client.export(export_params).await; - - assert!(export_res.is_ok(), "Export failed"); - let exported_jsonl = export_res.unwrap(); + let exported_jsonl = documents_client + .export_jsonl(export_params) + .await + .expect("Export failed"); // Verify the exported content is a JSONL string with 2 lines. let lines: Vec<&str> = exported_jsonl.trim().split('\n').collect(); assert_eq!(lines.len(), 2, "Exported JSONL should have 2 lines"); let exported_doc_1: serde_json::Value = serde_json::from_str(lines[0]).unwrap(); - assert_eq!(exported_doc_1["author"], "Sci-Fi Pioneer"); + assert_eq!(exported_doc_1["author"], "John"); - // --- 14. Bulk Delete --- + // --- Bulk Delete --- let delete_params = DeleteDocumentsParameters { filter_by: "publication_year:>1960".to_owned(), ..Default::default() }; - let bulk_delete_res = documents_client.delete(delete_params).await; - assert!(bulk_delete_res.is_ok(), "Bulk delete failed"); - // Only "Dune" (1965) should be deleted - assert_eq!(bulk_delete_res.unwrap().num_deleted, 1); + let bulk_delete_response = documents_client + .delete(delete_params) + .await + .expect("Bulk delete failed"); + // Only "The Hitchhiker's Guide to the Galaxy" (1979) should be deleted + assert_eq!(bulk_delete_response.num_deleted, 1); } // --- TESTS FOR GENERIC FEATURES --- /// A strongly-typed representation of a book document. -#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] +#[derive(typesense::Typesense, Serialize, Deserialize, Debug, PartialEq, Clone)] struct Book { id: String, title: String, @@ -237,11 +180,11 @@ async fn run_test_generic_document_lifecycle() { ..Default::default() }; - let create_collection_result = client.collections().create(schema).await; - assert!( - create_collection_result.is_ok(), - "Failed to create collection for generic test" - ); + let _create_collection = client + .collections() + .create(schema) + .await + .expect("Failed to create collection for generic test"); // Use the strongly-typed collection client let typed_collection = client.collection_named::(&collection_name); @@ -263,20 +206,29 @@ async fn run_test_generic_document_lifecycle() { }; // --- 2. 
Create a document using a typed struct --- - let create_res = typed_collection.documents().create(&book_1, None).await; - assert!(create_res.is_ok(), "Failed to create typed document"); + let create_book = typed_collection + .documents() + .create(&book_1, None) + .await + .expect("Failed to create typed document"); // The created document should be returned and be equal to the input - assert_eq!(create_res.unwrap(), book_1); + assert_eq!(create_book, book_1); // --- 3. Upsert a document using a typed struct --- - let upsert_res = typed_collection.documents().upsert(&book_2, None).await; - assert!(upsert_res.is_ok(), "Failed to upsert typed document"); - assert_eq!(upsert_res.unwrap(), book_2); + let upsert_book = typed_collection + .documents() + .upsert(&book_2, None) + .await + .expect("Failed to upsert typed document"); + assert_eq!(upsert_book, book_2); // --- 4. Retrieve a single document and deserialize into a struct --- - let retrieve_res = typed_collection.document(&book_1.id).retrieve().await; - assert!(retrieve_res.is_ok(), "Failed to retrieve typed document"); - assert_eq!(retrieve_res.unwrap(), book_1); + let retrieve_book = typed_collection + .document(&book_1.id) + .retrieve() + .await + .expect("Failed to retrieve typed document"); + assert_eq!(retrieve_book, book_1); // --- 5. Search for documents with strongly-typed results --- let search_params = SearchParameters { @@ -284,9 +236,11 @@ async fn run_test_generic_document_lifecycle() { query_by: Some("title".to_owned()), ..Default::default() }; - let search_res = typed_collection.documents().search(search_params).await; - assert!(search_res.is_ok(), "Typed search failed"); - let search_results = search_res.unwrap(); + let search_results = typed_collection + .documents() + .search(search_params) + .await + .expect("Typed search failed"); assert_eq!(search_results.found, Some(1)); let hits = search_results.hits.expect("Search should have hits"); @@ -299,39 +253,54 @@ async fn run_test_generic_document_lifecycle() { assert_eq!(hit_doc, &book_1); // --- 6. Update a single document with a partial payload --- - #[derive(Serialize)] - struct BookUpdate { - publication_year: i32, - in_stock: bool, - } - let partial_update_struct = BookUpdate { - publication_year: 1966, - in_stock: false, + let partial_update_struct = BookPartial { + publication_year: Some(1966), + in_stock: Some(Some(false)), + ..Default::default() }; let index_params = DocumentIndexParameters { dirty_values: Some(DirtyValues::CoerceOrReject), }; - let update_res = typed_collection + let updated_book = typed_collection .document(&book_1.id) .update(&partial_update_struct, Some(index_params)) - .await; - assert!(update_res.is_ok(), "Failed to update typed document"); + .await + .expect("Failed to update typed document"); // The returned document should be the full, updated Book struct - let updated_book = update_res.unwrap(); assert_eq!(updated_book.publication_year, 1966); assert_eq!(updated_book.in_stock, Some(false)); assert_eq!(updated_book.title, book_1.title); // Other fields are preserved - // --- 7. Delete a single document, receiving the typed struct back --- - let delete_res = typed_collection.document(&book_1.id).delete().await; - assert!(delete_res.is_ok(), "Failed to delete typed document"); + // --- 7. 
Bulk Update (via `documents().update()`) --- + let bulk_update_params = UpdateDocumentsParameters { + filter_by: Some("publication_year:>1965".to_owned()), + }; + let bulk_update_response = typed_collection + .documents() + .update( + &BookPartial { + publication_year: Some(2100), + ..Default::default() + }, + bulk_update_params, + ) + .await + .expect("Bulk update failed"); + // Should update book 1 (1966) + assert_eq!(bulk_update_response.num_updated, 1); + + // --- 8. Delete a single document, receiving the typed struct back --- + let deleted_book = typed_collection + .document(&book_1.id) + .delete() + .await + .expect("Failed to delete typed document"); // The deleted document (in its state just before deletion) is returned - let deleted_book = delete_res.unwrap(); assert_eq!(deleted_book.id, book_1.id); - assert_eq!(deleted_book.publication_year, 1966); // It was the updated version + assert_eq!(deleted_book.publication_year, 2100); // It was the bulk updated version - // --- 8. Verify single deletion --- + // --- 9. Verify single deletion --- let retrieve_after_delete_res = typed_collection.document(&book_1.id).retrieve().await; assert!( retrieve_after_delete_res.is_err(), @@ -344,8 +313,8 @@ mod tokio_test { use super::*; #[tokio::test] - async fn test_document_lifecycle() { - run_test_document_lifecycle().await; + async fn test_schemaless_document_lifecycle() { + run_test_schemaless_document_lifecycle().await; } #[tokio::test] async fn test_generic_document_lifecycle() { @@ -361,9 +330,9 @@ mod wasm_test { wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser); #[wasm_bindgen_test] - async fn test_document_lifecycle() { + async fn test_schemaless_document_lifecycle() { console_error_panic_hook::set_once(); - run_test_document_lifecycle().await; + run_test_schemaless_document_lifecycle().await; } #[wasm_bindgen_test] diff --git a/typesense_codegen/src/apis/documents_api.rs b/typesense_codegen/src/apis/documents_api.rs index 92a3e8e..1ab77a3 100644 --- a/typesense_codegen/src/apis/documents_api.rs +++ b/typesense_codegen/src/apis/documents_api.rs @@ -255,24 +255,24 @@ pub struct SearchCollectionParams { /// struct for passing parameters to the method [`update_document`] #[derive(Clone, Debug)] -pub struct UpdateDocumentParams { +pub struct UpdateDocumentParams { /// The name of the collection to search for the document under pub collection_name: String, /// The Document ID pub document_id: String, /// The document object with fields to be updated - pub body: serde_json::Value, + pub body: B, /// Dealing with Dirty Data pub dirty_values: Option, } /// struct for passing parameters to the method [`update_documents`] #[derive(Clone, Debug)] -pub struct UpdateDocumentsParams { +pub struct UpdateDocumentsParams { /// The name of the collection to update documents in pub collection_name: String, /// The document fields to be updated - pub body: serde_json::Value, + pub body: B, pub filter_by: Option, } @@ -1548,9 +1548,9 @@ pub async fn search_collection serde::Deserialize<'de> + Serialize>( } /// Update an individual document from a collection by using its ID. The update can be partial. 
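
Since the regenerated `documents_api` params structs are now generic over the request body, the low-level functions can be handed a borrowed partial struct directly instead of a `serde_json::Value`. A minimal sketch under that assumption — the crate paths and the raw `serde_json::Value` return are assumptions based on the existing generated code, and `BookPartial` is again the assumed derive output:

```rust
use typesense_codegen::apis::{configuration::Configuration, documents_api};

async fn demo(config: &Configuration, partial: &BookPartial) -> Result<(), Box<dyn std::error::Error>> {
    // `B` is inferred as `&BookPartial`; only `serde::Serialize` is required of it.
    let params = documents_api::UpdateDocumentParams {
        collection_name: "books".to_owned(),
        document_id: "123".to_owned(),
        body: partial,
        dirty_values: None,
    };
    let updated = documents_api::update_document(config, &params).await?;
    // The endpoint itself still answers with the raw JSON document.
    println!("{updated}");
    Ok(())
}
```
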
-pub async fn update_document( +pub async fn update_document( configuration: &configuration::Configuration, - params: &UpdateDocumentParams, + params: &UpdateDocumentParams, ) -> Result> { let uri_str = format!( "{}/collections/{collectionName}/documents/{documentId}", @@ -1616,9 +1616,9 @@ pub async fn update_document( } /// The filter_by query parameter is used to filter to specify a condition against which the documents are matched. The request body contains the fields that should be updated for any documents that match the filter condition. This endpoint is only available if the Typesense server is version `0.25.0.rc12` or later. -pub async fn update_documents( +pub async fn update_documents( configuration: &configuration::Configuration, - params: &UpdateDocumentsParams, + params: &UpdateDocumentsParams, ) -> Result> { let uri_str = format!( "{}/collections/{collectionName}/documents", diff --git a/typesense_codegen/src/lib.rs b/typesense_codegen/src/lib.rs index d9c227b..3280d74 100644 --- a/typesense_codegen/src/lib.rs +++ b/typesense_codegen/src/lib.rs @@ -1,7 +1,8 @@ -#![allow(unused_imports)] +#![allow(clippy::derivable_impls)] #![allow(clippy::empty_docs)] #![allow(clippy::needless_return)] #![allow(elided_lifetimes_in_paths)] +#![allow(unused_imports)] pub mod apis; pub mod models; diff --git a/typesense_derive/src/lib.rs b/typesense_derive/src/lib.rs index a4fc97e..3248e62 100644 --- a/typesense_derive/src/lib.rs +++ b/typesense_derive/src/lib.rs @@ -16,7 +16,7 @@ fn impl_typesense_collection(item: ItemStruct) -> syn::Result { let ItemStruct { attrs, - vis: _, + vis, struct_token: _, ident, generics, @@ -77,15 +77,38 @@ fn impl_typesense_collection(item: ItemStruct) -> syn::Result { proc_macro2::TokenStream::new() }; + let optional_fields = fields.iter().filter_map(|f| { + let ident = f.ident.as_ref()?; + if ident == "id" { + return None; + } + let vis = &f.vis; + let ty = &f.ty; + + Some(quote! { + #[serde(skip_serializing_if = "Option::is_none")] + #vis #ident: Option<#ty>, + }) + }); + + let name_partial = Ident::new(&(ident.to_string() + "Partial"), ident.span()); + let generated_code = quote! { - impl #impl_generics typesense::prelude::Document for #ident #ty_generics #where_clause { + #[derive(Default, Serialize)] + #vis struct #name_partial { + #(#optional_fields)* + } + impl ::typesense::prelude::DocumentPartial for #name_partial {} + + impl #impl_generics ::typesense::prelude::Document for #ident #ty_generics #where_clause { const COLLECTION_NAME: &str = #collection_name; + type Partial = #name_partial; - fn collection_schema() -> typesense::models::CollectionSchema { + fn collection_schema() -> ::typesense::models::CollectionSchema { let name = Self::COLLECTION_NAME.to_owned(); let fields = vec![#(#typesense_fields,)*]; - let builder = typesense::models::CollectionSchema::builder().name(name).fields(fields); + let builder = ::typesense::models::CollectionSchema::builder().name(name).fields(fields); #default_sorting_field #enable_nested_fields @@ -120,7 +143,7 @@ fn add_trait_bounds(mut generics: syn::Generics) -> syn::Generics { if let syn::GenericParam::Type(ref mut type_param) = *param { type_param .bounds - .push(syn::parse_quote!(typesense::field::ToTypesenseField)); + .push(syn::parse_quote!(::typesense::field::ToTypesenseField)); } } generics @@ -287,11 +310,11 @@ fn to_typesense_field_type(field: &Field) -> syn::Result::to_typesense_type().to_owned() + <#ty as ::typesense::prelude::ToTypesenseField>::to_typesense_type().to_owned() ); Ok(quote! 
{ - typesense::models::Field::builder().name(std::string::String::from(stringify!(#name))).r#type(#typesense_field_type) + ::typesense::models::Field::builder().name(std::string::String::from(stringify!(#name))).r#type(#typesense_field_type) .maybe_optional(#optional) .maybe_facet(#facet) .build() diff --git a/xtask/src/add_vendor_attributes.rs b/xtask/src/add_vendor_attributes.rs index b6fa561..d8fd0aa 100644 --- a/xtask/src/add_vendor_attributes.rs +++ b/xtask/src/add_vendor_attributes.rs @@ -62,5 +62,22 @@ pub fn add_vendor_attributes(doc_root: &mut Mapping) -> Result<(), String> { .supports_plain_text() .done()?; + attrs + .operation("/collections/{collectionName}/documents", "patch") + .generic_parameter("") + .params_generic_parameter("") + .request_type("B") + .done()?; + + attrs + .operation( + "/collections/{collectionName}/documents/{documentId}", + "patch", + ) + .generic_parameter("") + .params_generic_parameter("") + .request_type("B") + .done()?; + Ok(()) } diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 1ef8017..b90865b 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -32,32 +32,31 @@ struct Cli { #[derive(ValueEnum, Clone, Debug)] #[clap(rename_all = "kebab-case")] // Allows us to type `code-gen` instead of `CodeGen` enum Task { - /// Fetches the latest OpenAPI spec from [the Typesense repository](https://github.com/typesense/typesense-api-spec/blob/master/openapi.yml). - Fetch, /// Generates client code from the spec file using the Docker container. CodeGen, + /// Fetches the latest OpenAPI spec from [the Typesense repository](https://github.com/typesense/typesense-api-spec/blob/master/openapi.yml). + Fetch, + /// Preprocesses fetched OpenAPI spec file into a new one + Preprocess, } -#[cfg(target_family = "wasm")] -fn main() {} - -#[cfg(not(target_family = "wasm"))] fn main() -> Result<()> { let cli = Cli::parse(); for task in cli.tasks { println!("▶️ Running task: {:?}", task); match task { - Task::Fetch => task_fetch_api_spec()?, Task::CodeGen => task_codegen()?, + Task::Fetch => task_fetch_api_spec()?, + Task::Preprocess => preprocess_openapi_file(INPUT_SPEC_FILE, OUTPUT_PREPROCESSED_FILE) + .expect("Preprocess failed, aborting!"), } } Ok(()) } -#[cfg(not(target_family = "wasm"))] fn task_fetch_api_spec() -> Result<()> { - println!("▶️ Running codegen task..."); + println!("▶️ Running fetch task..."); println!(" - Downloading spec from {}", SPEC_URL); let response = @@ -80,10 +79,6 @@ fn task_fetch_api_spec() -> Result<()> { /// Task to generate client code from the OpenAPI spec using a Docker container. fn task_codegen() -> Result<()> { println!("▶️ Running codegen task via Docker..."); - - println!("Preprocessing the Open API spec file..."); - preprocess_openapi_file(INPUT_SPEC_FILE, OUTPUT_PREPROCESSED_FILE) - .expect("Preprocess failed, aborting!"); // Get the absolute path to the project's root directory. // std::env::current_dir() gives us the directory from which `cargo xtask` was run. 
let project_root = env::current_dir().context("Failed to get current directory")?; diff --git a/xtask/src/preprocess_openapi.rs b/xtask/src/preprocess_openapi.rs index 1e49ad5..abb1046 100644 --- a/xtask/src/preprocess_openapi.rs +++ b/xtask/src/preprocess_openapi.rs @@ -8,6 +8,7 @@ pub fn preprocess_openapi_file( input_path: &str, output_path: &str, ) -> Result<(), Box> { + println!("Preprocessing the Open API spec file..."); // --- Step 1: Read the OpenAPI spec from the input file --- println!("Reading OpenAPI spec from {}...", input_path); let input_content = fs::read_to_string(input_path) diff --git a/xtask/src/vendor_attributes.rs b/xtask/src/vendor_attributes.rs index 17d411e..466d88a 100644 --- a/xtask/src/vendor_attributes.rs +++ b/xtask/src/vendor_attributes.rs @@ -3,8 +3,19 @@ use serde_yaml::{Mapping, Value}; /// Where to apply a vendor (x-*) attribute. pub enum VendorLocation<'a> { Schema(&'a str), - SchemaField { schema: &'a str, field: &'a str }, - Operation { path: &'a str, method: &'a str }, + SchemaField { + schema: &'a str, + field: &'a str, + }, + Operation { + path: &'a str, + method: &'a str, + }, + OperationField { + path: &'a str, + method: &'a str, + field: &'a str, + }, } /// Main helper struct that holds a mutable borrow of the OpenAPI root mapping. @@ -86,6 +97,19 @@ impl<'a> VendorAttributes<'a> { Self::insert_into_map(op_map, attr, val); Ok(self) } + VendorLocation::OperationField { + path, + method, + field, + } => { + let field_map = + self.get_map_mut(&["paths", path, method, field]) + .map_err(|_| { + format!("operation field not found: {} {} {}", method, path, field) + })?; + Self::insert_into_map(field_map, attr, val); + Ok(self) + } } } @@ -166,13 +190,44 @@ impl<'a, 'b> OperationContext<'a, 'b> { } } + fn try_set_field(&mut self, field: &str, attr: &str, val: Value) { + if self.error.is_some() { + return; + } + if let Err(e) = self.vendor.set_attr( + VendorLocation::OperationField { + path: self.path, + method: self.method, + field, + }, + attr, + val, + ) { + self.error = Some(e); + } + } + pub fn generic_parameter(mut self, generic: &str) -> Self { self.try_set("x-rust-generic-parameter", Value::String(generic.into())); self } - pub fn return_type(mut self, rust_type: &str) -> Self { - self.try_set("x-rust-return-type", Value::String(rust_type.into())); + pub fn params_generic_parameter(mut self, generic: &str) -> Self { + self.try_set_field( + "requestBody", + "x-rust-params-generic-parameter", + Value::String(generic.into()), + ); + self + } + + pub fn return_type(mut self, typ: &str) -> Self { + self.try_set("x-rust-return-type", Value::String(typ.into())); + self + } + + pub fn request_type(mut self, typ: &str) -> Self { + self.try_set_field("requestBody", "x-rust-type", Value::String(typ.into())); self }
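
To tie the pieces together, this is roughly the shape of the partial-update type that `#[derive(Typesense)]` is expected to expand to under the new derive code above: every field except `id` reappears as an `Option` (so an already-optional field becomes a nested `Option<Option<T>>`), unset fields are skipped during serialization, and the generated type is wired up as `Document::Partial`. `Book`/`BookPartial` remain illustrative names; the exact expansion is whatever the macro emits.

```rust
use serde::{Deserialize, Serialize};

// Hand-written document struct (shown without the derive so the sketch below
// does not conflict with the macro-generated type).
#[derive(Serialize, Deserialize)]
struct Book {
    id: String,
    title: String,
    publication_year: i32,
    in_stock: Option<bool>,
}

// Approximate expansion generated next to the `Document` impl:
#[derive(Default, Serialize)]
struct BookPartial {
    #[serde(skip_serializing_if = "Option::is_none")]
    title: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    publication_year: Option<i32>,
    // `Option<bool>` in `Book` becomes `Option<Option<bool>>` here, matching
    // `in_stock: Some(Some(false))` in the updated integration tests.
    #[serde(skip_serializing_if = "Option::is_none")]
    in_stock: Option<Option<bool>>,
}

impl typesense::prelude::DocumentPartial for BookPartial {}
```
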