1 change: 1 addition & 0 deletions icechunk/Cargo.toml
@@ -79,6 +79,7 @@ reqwest = { version = "0.12.26", default-features = false, features = [
"http2",
"system-proxy",
] }
tempfile = "3.23.0"
[dev-dependencies]
fs_extra = "1.3.0"
icechunk-macros = { path = "../icechunk-macros", version = "0.1.0" }
2 changes: 2 additions & 0 deletions icechunk/src/change_set.rs
@@ -965,4 +965,6 @@ mod tests {
&Path::new("/other").unwrap(),
);
}

// roundtrip_serialization_tests!(serialize_and_deserialize_change_sets - change_sets);
}
40 changes: 21 additions & 19 deletions icechunk/src/config.rs
@@ -632,25 +632,17 @@ pub enum Credentials {
Azure(AzureCredentials),
}

#[cfg(test)]
#[allow(clippy::panic, clippy::unwrap_used, clippy::expect_used)]
mod tests {
use crate::strategies::{
azure_credentials, gcs_static_credentials, repository_config,
s3_static_credentials,
};

use proptest::prelude::*;

// This macro is used for creating property tests
// which check that serializing and deserializing
// an instance of a type T is equivalent to the
// identity function
// Given pairs of test names and arbitraries to be used
// for the tests, e.g., (n1, a1), (n2, a2),... (nx, ax)
// the tests can be created by doing
// roundtrip_serialization_tests!(n1 - a1, n2 - a2, .... nx - ax)
macro_rules! roundtrip_serialization_tests {
// This macro is used for creating property tests
// which check that serializing and deserializing
// an instance of a type T is equivalent to the
// identity function
// Given pairs of test names and arbitraries to be used
// for the tests, e.g., (n1, a1), (n2, a2),... (nx, ax)
// the tests can be created by doing
// roundtrip_serialization_tests!(n1 - a1, n2 - a2, .... nx - ax)
#[allow(unused_macros)]
#[macro_export]
macro_rules! roundtrip_serialization_tests {
($($test_name: ident - $generator: ident), +) => {
$(
proptest!{
@@ -663,6 +655,16 @@ mod tests {
}
}

#[cfg(test)]
#[allow(clippy::panic, clippy::unwrap_used, clippy::expect_used)]
mod tests {
use crate::strategies::{
azure_credentials, gcs_static_credentials, repository_config,
s3_static_credentials,
};

use proptest::prelude::*;

roundtrip_serialization_tests!(
test_config_roundtrip - repository_config,
test_s3_static_credentials_roundtrip - s3_static_credentials,
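The comment on the relocated `roundtrip_serialization_tests!` macro explains its usage: each `name - strategy` pair expands into a proptest that checks serialize-then-deserialize is the identity. A minimal plain-function sketch of that property is below; the macro body is collapsed in this diff, so the use of `serde_json` and the exact assertion style here are assumptions, not the crate's actual implementation.

```rust
use serde::{de::DeserializeOwned, Serialize};

/// Sketch of the roundtrip property each generated test asserts:
/// serializing a value and deserializing the result yields an equal value.
fn roundtrips<T>(value: &T) -> bool
where
    T: Serialize + DeserializeOwned + PartialEq,
{
    // Encode to JSON (format assumed here), then decode back to T.
    let encoded = serde_json::to_string(value).expect("serialization failed");
    let decoded: T = serde_json::from_str(&encoded).expect("deserialization failed");
    value == &decoded
}
```

An invocation such as the commented-out `roundtrip_serialization_tests!(serialize_and_deserialize_change_sets - change_sets)` in `change_set.rs` would then generate a proptest case applying this property to values drawn from the `change_sets` strategy.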
4 changes: 4 additions & 0 deletions icechunk/src/storage/redirect.rs
@@ -55,6 +55,7 @@ impl RedirectStorage {
}

async fn mk_backend(&self) -> StorageResult<Arc<dyn Storage>> {
println!("\n------Using this function-----\n\n");
let redirect = |attempt: rw::redirect::Attempt| {
// TODO: make configurable
if attempt.previous().len() > 10 {
@@ -77,6 +78,8 @@ impl RedirectStorage {
"Cannot build http client for redirect Storage instance: {e}"
)))
})?;

println!(" The URL is {}", self.url.clone());
let req = client.get(self.url.clone()).build().map_err(|e| {
StorageError::from(StorageErrorKind::BadRedirect(format!(
"Cannot build http request for redirect Storage instance: {e}"
@@ -87,6 +90,7 @@
"Request to redirect url ({}) failed, cannot find target Storage instance: {e}", &self.url
)))
})?;
println!("The status: {}, The headers: {:?}", res.status(), res.headers());
let storage_url = res.headers().get("location").ok_or_else(|| {
StorageError::from(StorageErrorKind::BadRedirect(
"Redirect Storage response must be a redirect, no location header detected".to_string()
53 changes: 53 additions & 0 deletions icechunk/src/strategies.rs
@@ -456,3 +456,56 @@ pub fn azure_credentials() -> BoxedStrategy<AzureCredentials> {
use AzureCredentials::*;
prop_oneof![Just(FromEnv), azure_static_credentials().prop_map(Static)].boxed()
}

// pub fn path() -> BoxedStrategy<Path> {
// Just(())
// .prop_filter_map("Could not generate a valid file path", |_| {
// let canon_file_path = NamedTempFile::new()
// .ok()
// .and_then(|file| file.path().canonicalize().ok())?;
//
// canon_file_path.to_str().and_then(|file_name| Path::new(file_name).ok())
// })
// .boxed()
// }
//
// type DimensionShapeInfo = (u64, u64);
//
// prop_compose! {
// fn dimension_shape_info()(dim_length in any::<u64>(), chunk_length in any::<NonZeroU64>()) -> DimensionShapeInfo {
// (dim_length, chunk_length.get())
// }
// }
//
// prop_compose! {
// fn array_shape()(dimensions in vec(dimension_shape_info(), 10)) -> ArrayShape {
// ArrayShape::new(dimensions).unwrap()
// }
// }
//
// fn dimension_name() -> BoxedStrategy<DimensionName> {
// use DimensionName::*;
// prop_oneof![
// Just(NotSpecified),
// any::<String>().prop_map(Name)
// ].boxed()
// }
//
// prop_compose! {
// fn bytes()(random_data in any::<Vec<u8>>()) -> Bytes {
// Bytes::from(random_data)
// }
// }
//
// prop_compose! {
// fn array_data()(shape in array_shape(),
// dimension_names in option::of(vec(dimension_name(), 10)),
// user_data in bytes()) -> ArrayData {
// ArrayData{shape, dimension_names, user_data}
// }
// }
//
// fn node_id() -> BoxedStrategy<NodeId> {
// Just(NodeId::random()).boxed()
// }
//
4 changes: 4 additions & 0 deletions icechunk/tests/test_storage.rs
@@ -933,9 +933,13 @@ pub async fn test_redirect_storage() -> Result<(), Box<dyn std::error::Error>> {

let join = tokio::task::spawn(server.run());
let url = format!("http://localhost:{port}");
println!("Have not errored before reaching this point");
let storage = new_redirect_storage(url.as_str())?;
println!("The value is {storage:?}");
let mut data = Vec::with_capacity(1_024);
println!("Have not errored before reaching the second point");
let settings = storage.default_settings().await?;
println!("Have not errored before reaching the third point");
let mut read =
storage.get_object(&settings, "refs/branch.main/ref.json", None).await?.0;
