
Commit 1d81142

Bulk (#8)
1 parent 0abbdb0 commit 1d81142

3 files changed (+40 -28 lines)


.github/workflows/ci-monorepo.yml

Lines changed: 1 addition & 1 deletion
@@ -209,7 +209,7 @@ jobs:
           - name: "BDD Tests"
             command: "pytest tests/bdd -v"
           - name: "Example App"
-            command: "cd ../../examples/fastapi_app && pytest tests/ -v"
+            command: "cd examples/fastapi_app && pytest tests/ -v"

     services:
       cassandra:
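
The only change here drops the ../../ prefix, which suggests the "Example App" step now runs from the repository root rather than from libs/async-cassandra. A minimal sketch of that path assumption (the script and layout are illustrative, not part of this commit):

# check_example_path.py -- hypothetical helper, not part of this commit.
# It only checks the layout the new CI command assumes: with the working
# directory at the repository root, examples/fastapi_app/tests must exist.
from pathlib import Path

repo_root = Path.cwd()  # assumption: the CI step starts at the checkout root
example_tests = repo_root / "examples" / "fastapi_app" / "tests"

if not example_tests.is_dir():
    raise SystemExit(f"missing {example_tests}; 'cd examples/fastapi_app' would fail")
print(f"OK: {example_tests} resolves from {repo_root}")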

libs/async-cassandra/pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -62,6 +62,7 @@ test = [
     "httpx>=0.24.0",
     "uvicorn>=0.23.0",
     "psutil>=5.9.0",
+    "pyarrow>=10.0.0",
 ]
 docs = [
     "sphinx>=6.0.0",

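pyarrow is added to the test extras, matching the Parquet export assertions added below (SNAPPY, GZIP, LZ4). A minimal sketch of the kind of round-trip that dependency enables; the table contents and file paths are illustrative assumptions, not taken from the example script:

# Hypothetical pyarrow round-trip: write a small table with each compression
# codec the new assertions mention, then read it back. Data and paths are
# made up for illustration only.
import pyarrow as pa
import pyarrow.parquet as pq

table = pa.table({"id": [1, 2, 3], "name": ["a", "b", "c"]})

for codec in ("snappy", "gzip", "lz4"):
    path = f"/tmp/products_{codec}.parquet"
    pq.write_table(table, path, compression=codec)
    assert pq.read_table(path).num_rows == table.num_rows
    print(f"{codec.upper()} compression: wrote and re-read {path}")
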
libs/async-cassandra/tests/integration/test_example_scripts.py

Lines changed: 38 additions & 27 deletions
@@ -91,13 +91,15 @@ async def test_streaming_basic_example(self, cassandra_cluster):
         # Verify expected output patterns
         # The examples use logging which outputs to stderr
         output = result.stderr if result.stderr else result.stdout
-        assert "Basic Streaming Example" in output
+        assert "BASIC STREAMING EXAMPLE" in output
         assert "Inserted 100000 test events" in output or "Inserted 100,000 test events" in output
-        assert "Streaming completed:" in output
+        assert "Streaming completed!" in output
         assert "Total events: 100,000" in output or "Total events: 100000" in output
-        assert "Filtered Streaming Example" in output
-        assert "Page-Based Streaming Example (True Async Paging)" in output
-        assert "Pages are fetched asynchronously" in output
+        assert "FILTERED STREAMING EXAMPLE" in output
+        assert "PAGE-BASED STREAMING EXAMPLE (True Async Paging)" in output
+        assert (
+            "Pages are fetched ON-DEMAND" in output or "Pages were fetched asynchronously" in output
+        )

         # Verify keyspace was cleaned up
         async with AsyncCluster(["localhost"]) as cluster:
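
Several of the rewritten assertions accept more than one phrasing of the same banner, falling back to a parenthesized assert ( ... or ... ) once a line gets long. The same check can be sketched as a small helper; this is hypothetical and not part of the commit:

# Hypothetical helper: assert that at least one expected phrasing appears
# in the captured example output.
def assert_any_in(output: str, *candidates: str) -> None:
    if not any(candidate in output for candidate in candidates):
        raise AssertionError(f"none of {candidates!r} found in example output")

# Equivalent to the multi-branch assert above, e.g.:
# assert_any_in(output, "Pages are fetched ON-DEMAND", "Pages were fetched asynchronously")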
@@ -152,8 +154,8 @@ async def test_export_large_table_example(self, cassandra_cluster, tmp_path):

         # Verify expected output (might be in stdout or stderr due to logging)
         output = result.stdout + result.stderr
-        assert "Created 5000 sample products" in output
-        assert "Export completed:" in output
+        assert "Created 5,000 sample products" in output
+        assert "EXPORT COMPLETED SUCCESSFULLY!" in output
         assert "Rows exported: 5,000" in output
         assert f"Output directory: {export_dir}" in output

@@ -235,16 +237,16 @@ async def test_context_manager_safety_demo(self, cassandra_cluster):

         # Verify all demonstrations ran (might be in stdout or stderr due to logging)
         output = result.stdout + result.stderr
-        assert "Demonstrating Query Error Safety" in output
+        assert "QUERY ERROR SAFETY DEMONSTRATION" in output
         assert "Query failed as expected" in output
-        assert "Session still works after error" in output
+        assert "Session is healthy!" in output

-        assert "Demonstrating Streaming Error Safety" in output
+        assert "STREAMING ERROR SAFETY DEMONSTRATION" in output
         assert "Streaming failed as expected" in output
         assert "Successfully streamed" in output

-        assert "Demonstrating Context Manager Isolation" in output
-        assert "Demonstrating Concurrent Safety" in output
+        assert "CONTEXT MANAGER ISOLATION DEMONSTRATION" in output
+        assert "CONCURRENT OPERATIONS SAFETY DEMONSTRATION" in output

         # Verify key takeaways are shown
         assert "Query errors don't close sessions" in output
@@ -285,15 +287,19 @@ async def test_metrics_simple_example(self, cassandra_cluster):

         # Verify metrics output (might be in stdout or stderr due to logging)
         output = result.stdout + result.stderr
-        assert "Query Metrics Example" in output or "async-cassandra Metrics Example" in output
-        assert "Connection Health Monitoring" in output
-        assert "Error Tracking Example" in output or "Expected error recorded" in output
-        assert "Performance Summary" in output
+        assert "ASYNC-CASSANDRA METRICS COLLECTION EXAMPLE" in output
+        assert "CONNECTION HEALTH MONITORING" in output
+        assert "ERROR TRACKING DEMONSTRATION" in output or "Expected error captured" in output
+        assert "PERFORMANCE METRICS SUMMARY" in output

         # Verify statistics are shown
         assert "Total queries:" in output or "Query Metrics:" in output
         assert "Success rate:" in output or "Success Rate:" in output
-        assert "Average latency:" in output or "Average Duration:" in output
+        assert (
+            "Average latency:" in output
+            or "Average Duration:" in output
+            or "Query Performance:" in output
+        )

     @pytest.mark.timeout(240)  # Override default timeout for this test (lots of data)
     async def test_realtime_processing_example(self, cassandra_cluster):
@@ -333,15 +339,19 @@ async def test_realtime_processing_example(self, cassandra_cluster):
         output = result.stdout + result.stderr

         # Check that setup completed
-        assert "Setting up sensor data" in output
-        assert "Sample data inserted" in output
+        assert "Setting up IoT sensor data simulation" in output
+        assert "Sample data setup complete" in output

         # Check that processing occurred
-        assert "Processing Historical Data" in output or "Processing historical data" in output
-        assert "Processing completed" in output or "readings processed" in output
+        assert "PROCESSING HISTORICAL DATA" in output or "Processing Historical Data" in output
+        assert (
+            "Processing completed" in output
+            or "readings processed" in output
+            or "Analysis complete!" in output
+        )

         # Check that real-time simulation ran
-        assert "Simulating Real-Time Processing" in output or "Processing cycle" in output
+        assert "SIMULATING REAL-TIME PROCESSING" in output or "Processing cycle" in output

         # Verify cleanup
         assert "Cleaning up" in output
@@ -436,11 +446,12 @@ async def test_export_to_parquet_example(self, cassandra_cluster, tmp_path):
         output = result.stderr if result.stderr else result.stdout
         assert "Setting up test data" in output
         assert "Test data setup complete" in output
-        assert "Example 1: Export Entire Table" in output
-        assert "Example 2: Export Filtered Data" in output
-        assert "Example 3: Export with Different Compression" in output
-        assert "Export completed successfully!" in output
-        assert "Verifying Exported Files" in output
+        assert "EXPORT SUMMARY" in output
+        assert "SNAPPY compression:" in output
+        assert "GZIP compression:" in output
+        assert "LZ4 compression:" in output
+        assert "Three exports completed:" in output
+        assert "VERIFYING EXPORTED PARQUET FILES" in output
         assert f"Output directory: {export_dir}" in output

         # Verify Parquet files were created (look recursively in subdirectories)
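
All of these tests share one shape: run an example script in a subprocess, merge stdout and stderr (the examples log to stderr), and assert on the banners it prints. A minimal sketch of that pattern; the function name and script path are assumptions, not taken from the test file:

# Hypothetical reduction of the pattern used throughout test_example_scripts.py.
import subprocess
import sys

def run_example(script_path: str, timeout: int = 120) -> str:
    """Run an example script and return its merged stdout/stderr."""
    result = subprocess.run(
        [sys.executable, script_path],
        capture_output=True,
        text=True,
        timeout=timeout,
    )
    # The examples log via logging, which goes to stderr, so merge both streams.
    return result.stdout + result.stderr

# e.g. (path is illustrative):
# output = run_example("examples/export_to_parquet.py")
# assert "EXPORT SUMMARY" in output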

0 commit comments