Skip to content

Commit 84264f2

Browse files
authored
Merge pull request #34 from ummarig/feat/horizon
Feat/horizon
2 parents b61cb87 + 178c068 commit 84264f2

File tree

13 files changed

+3399
-0
lines changed

13 files changed

+3399
-0
lines changed

HORIZON_CLIENT.md

Lines changed: 564 additions & 0 deletions
Large diffs are not rendered by default.

crates/tools/Cargo.toml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,15 @@ toml = "0.7"
2020
thiserror = "1.0"
2121
stellar-baselib = "0.5.6"
2222
url = "2.5"
23+
reqwest = { version = "0.12", features = ["json"] }
24+
governor = "0.10"
25+
moka = { version = "0.12", features = ["future"] }
26+
chrono = { version = "0.4", features = ["serde"] }
27+
log = "0.4"
28+
env_logger = "0.11"
29+
uuid = { version = "1.0", features = ["v4", "serde"] }
30+
futures = "0.3"
31+
rand = "0.8"
2332

2433
[dev-dependencies]
2534
tempfile = "3"
Lines changed: 162 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,162 @@
1+
//! Response Caching for Horizon API
2+
//!
3+
//! Implements optional response caching to reduce API calls.
4+
5+
use crate::horizon_error::HorizonResult;
6+
use moka::future::Cache;
7+
use serde_json::Value;
8+
use std::time::Duration;
9+
10+
/// Cache statistics.
///
/// A plain-data snapshot of the cache's entry count and hit/miss
/// counters, as returned by `ResponseCache::stats`. All fields are
/// counters captured at the moment of the call; the struct itself
/// holds no live cache state.
//
// Added `Copy`, `PartialEq`, `Eq`, and `Default` derives: this is a
// small POD snapshot type, so the extra derives are free,
// backward-compatible, and make the type usable in assertions and as
// a zeroed default.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct CacheStats {
    /// Number of cache entries (as reported by the underlying cache;
    /// NOTE(review): moka's entry count is approximate — confirm if an
    /// exact figure is needed by callers)
    pub entries: u64,
    /// Number of lookups that found a cached value
    pub hits: u64,
    /// Number of lookups that found nothing
    pub misses: u64,
}
20+
21+
/// Response cache for Horizon API
///
/// Wraps a TTL-bounded `moka` async cache of JSON responses keyed by a
/// request string, plus two atomic counters used only for statistics
/// reporting (`stats` / `hit_rate`). The counters are updated with
/// relaxed ordering, so figures are approximate under concurrency.
pub struct ResponseCache {
    /// Internal cache; entries expire after the TTL passed to `new`
    cache: Cache<String, Value>,
    /// Hit count — incremented by `get` when a key is found
    hits: std::sync::atomic::AtomicU64,
    /// Miss count — incremented by `get` when a key is absent
    misses: std::sync::atomic::AtomicU64,
}
30+
31+
impl ResponseCache {
32+
/// Create a new response cache with TTL
33+
pub fn new(ttl: Duration) -> Self {
34+
let cache = Cache::builder()
35+
.time_to_live(ttl)
36+
.build();
37+
38+
Self {
39+
cache,
40+
hits: std::sync::atomic::AtomicU64::new(0),
41+
misses: std::sync::atomic::AtomicU64::new(0),
42+
}
43+
}
44+
45+
/// Get a cached response
46+
pub async fn get(&self, key: &str) -> HorizonResult<Value> {
47+
if let Some(value) = self.cache.get(key).await {
48+
self.hits.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
49+
return Ok(value);
50+
}
51+
52+
self.misses.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
53+
Err(crate::horizon_error::HorizonError::CacheError(
54+
"Cache miss".to_string(),
55+
))
56+
}
57+
58+
/// Set a cached response
59+
pub async fn set(&self, key: &str, value: Value) {
60+
self.cache.insert(key.to_string(), value).await;
61+
}
62+
63+
/// Clear the cache
64+
pub async fn clear(&self) {
65+
self.cache.invalidate_all();
66+
}
67+
68+
/// Get cache statistics
69+
pub fn stats(&self) -> Option<CacheStats> {
70+
let hits = self.hits.load(std::sync::atomic::Ordering::Relaxed);
71+
let misses = self.misses.load(std::sync::atomic::Ordering::Relaxed);
72+
73+
Some(CacheStats {
74+
entries: self.cache.entry_count(),
75+
hits,
76+
misses,
77+
})
78+
}
79+
80+
/// Get hit rate percentage
81+
pub fn hit_rate(&self) -> f64 {
82+
let hits = self.hits.load(std::sync::atomic::Ordering::Relaxed) as f64;
83+
let misses = self.misses.load(std::sync::atomic::Ordering::Relaxed) as f64;
84+
let total = hits + misses;
85+
86+
if total == 0.0 {
87+
0.0
88+
} else {
89+
(hits / total) * 100.0
90+
}
91+
}
92+
93+
/// Reset statistics
94+
pub fn reset_stats(&self) {
95+
self.hits.store(0, std::sync::atomic::Ordering::Relaxed);
96+
self.misses.store(0, std::sync::atomic::Ordering::Relaxed);
97+
}
98+
}
99+
100+
#[cfg(test)]
mod tests {
    use super::*;

    /// A fresh cache with a generous TTL so nothing expires mid-test.
    fn fresh_cache() -> ResponseCache {
        ResponseCache::new(Duration::from_secs(60))
    }

    /// A small JSON payload used across the tests.
    fn sample() -> Value {
        serde_json::json!({"test": "value"})
    }

    #[tokio::test]
    async fn test_cache_set_get() {
        let cache = fresh_cache();
        cache.set("key1", sample()).await;
        assert!(cache.get("key1").await.is_ok());
    }

    #[tokio::test]
    async fn test_cache_miss() {
        let cache = fresh_cache();
        assert!(cache.get("nonexistent").await.is_err());
    }

    #[tokio::test]
    async fn test_cache_clear() {
        let cache = fresh_cache();
        cache.set("key1", sample()).await;
        cache.clear().await;
        assert!(cache.get("key1").await.is_err());
    }

    #[tokio::test]
    async fn test_cache_stats() {
        let cache = fresh_cache();
        cache.set("key1", sample()).await;
        let _ = cache.get("key1").await; // hit
        let _ = cache.get("key2").await; // miss
        assert!(cache.stats().is_some());
    }

    #[tokio::test]
    async fn test_cache_hit_rate() {
        let cache = fresh_cache();
        cache.set("key1", sample()).await;

        // Two hits then one miss: rate should land at 2/3 ≈ 66.67 %.
        let _ = cache.get("key1").await;
        let _ = cache.get("key1").await;
        let _ = cache.get("key2").await;

        let rate = cache.hit_rate();
        assert!(rate > 60.0 && rate < 70.0);
    }
}

0 commit comments

Comments
 (0)