// cached/proc_macro.rs
1#![cfg_attr(docsrs, doc(cfg(feature = "proc_macro")))]
2
3/*!
4Procedural macros for defining functions that wrap a static-ref cache object.
5
6```rust,no_run
7use std::thread::sleep;
8use std::time::Duration;
9use cached::proc_macro::cached;
10
11/// Use an LRU cache with size 100 and a `(String, String)` cache key
12#[cached(size=100)]
13fn keyed(a: String, b: String) -> usize {
14 let size = a.len() + b.len();
15 sleep(Duration::new(size as u64, 0));
16 size
17}
18# pub fn main() { }
19```
20
21----
22
23```rust,no_run
24use std::thread::sleep;
25use std::time::Duration;
26use cached::proc_macro::cached;
27
28/// Use a timed-lru cache with size 1, a TTL of 60s,
29/// and a `(usize, usize)` cache key
30#[cached(size=1, time=60)]
31fn keyed(a: usize, b: usize) -> usize {
32 let total = a + b;
33 sleep(Duration::new(total as u64, 0));
34 total
35}
36pub fn main() {
37 keyed(1, 2); // Not cached, will sleep (1+2)s
38
39 keyed(1, 2); // Cached, no sleep
40
41 sleep(Duration::new(60, 0)); // Sleep for the TTL
42
43 keyed(1, 2); // 60s TTL has passed so the cached
44 // value has expired, will sleep (1+2)s
45
46 keyed(1, 2); // Cached, no sleep
47
48 keyed(2, 1); // New args, not cached, will sleep (2+1)s
49
50 keyed(1, 2); // Was evicted because of LRU size of 1,
51 // will sleep (1+2)s
52}
53```
54
55----
56
57```rust,no_run
58use std::thread::sleep;
59use std::time::Duration;
60use cached::proc_macro::cached;
61
62/// Use a timed cache with a TTL of 60s
63/// that refreshes the entry TTL on cache hit,
64/// and a `(String, String)` cache key
65#[cached(time=60, time_refresh=true)]
66fn keyed(a: String, b: String) -> usize {
67 let size = a.len() + b.len();
68 sleep(Duration::new(size as u64, 0));
69 size
70}
71# pub fn main() { }
72```
73
74----
75
76```rust,no_run
77use cached::proc_macro::cached;
78
79# fn do_something_fallible() -> std::result::Result<(), ()> {
80# Ok(())
81# }
82
83/// Cache a fallible function. Only `Ok` results are cached.
84#[cached(size=1, result = true)]
85fn keyed(a: String) -> Result<usize, ()> {
86 do_something_fallible()?;
87 Ok(a.len())
88}
89# pub fn main() { }
90```
91
92----
93
94```rust,no_run
95use cached::proc_macro::cached;
96
97/// Cache an optional function. Only `Some` results are cached.
98#[cached(size=1, option = true)]
99fn keyed(a: String) -> Option<usize> {
100 if a == "a" {
101 Some(a.len())
102 } else {
103 None
104 }
105}
106# pub fn main() { }
107```
108
109----
110
111```rust,no_run
112use cached::proc_macro::cached;
113
114/// Cache an optional function. Only `Some` results are cached.
115/// When called concurrently, duplicate argument-calls will be
116/// synchronized so as to only run once - the remaining concurrent
117/// calls return a cached value.
118#[cached(size=1, option = true, sync_writes = "default")]
119fn keyed(a: String) -> Option<usize> {
120 if a == "a" {
121 Some(a.len())
122 } else {
123 None
124 }
125}
126# pub fn main() { }
127```
128
129----
130
131```rust,no_run
132use cached::proc_macro::cached;
133use cached::Return;
134
135/// Get a `cached::Return` value that indicates
136/// whether the value returned came from the cache:
137/// `cached::Return.was_cached`.
138/// Use an LRU cache and a `String` cache key.
139#[cached(size=1, with_cached_flag = true)]
140fn calculate(a: String) -> Return<String> {
141 Return::new(a)
142}
143pub fn main() {
144 let r = calculate("a".to_string());
145 assert!(!r.was_cached);
146 let r = calculate("a".to_string());
147 assert!(r.was_cached);
148 // Return<String> derefs to String
149 assert_eq!(r.to_uppercase(), "A");
150}
151```
152
153----
154
155```rust,no_run
156use cached::proc_macro::cached;
157use cached::Return;
158
159# fn do_something_fallible() -> std::result::Result<(), ()> {
160# Ok(())
161# }
162
163/// Same as the previous, but returning a Result
164#[cached(size=1, result = true, with_cached_flag = true)]
165fn calculate(a: String) -> Result<Return<usize>, ()> {
166 do_something_fallible()?;
167 Ok(Return::new(a.len()))
168}
169pub fn main() {
170 match calculate("a".to_string()) {
171 Err(e) => eprintln!("error: {:?}", e),
172 Ok(r) => {
173 println!("value: {:?}, was cached: {}", *r, r.was_cached);
174 // value: 1, was cached: true
175 }
176 }
177}
178```
179
180----
181
182```rust,no_run
183use cached::proc_macro::cached;
184use cached::Return;
185
186/// Same as the previous, but returning an Option
187#[cached(size=1, option = true, with_cached_flag = true)]
188fn calculate(a: String) -> Option<Return<usize>> {
189 if a == "a" {
190 Some(Return::new(a.len()))
191 } else {
192 None
193 }
194}
195pub fn main() {
196 if let Some(a) = calculate("a".to_string()) {
197 println!("value: {:?}, was cached: {}", *a, a.was_cached);
198 // value: 1, was cached: true
199 }
200}
201```
202
203----
204
205```rust,no_run
206use std::thread::sleep;
207use std::time::Duration;
208use cached::proc_macro::cached;
209use cached::SizedCache;
210
211/// Use an explicit cache-type with a custom creation block and custom cache-key generating block
212#[cached(
213 ty = "SizedCache<String, usize>",
214 create = "{ SizedCache::with_size(100) }",
215 convert = r#"{ format!("{}{}", a, b) }"#
216)]
217fn keyed(a: &str, b: &str) -> usize {
218 let size = a.len() + b.len();
219 sleep(Duration::new(size as u64, 0));
220 size
221}
222# pub fn main() { }
223```
224
225----
226
227```rust,no_run
228use cached::proc_macro::once;
229
230/// Only cache the initial function call.
231/// Function will be re-executed after the cache
232/// expires (according to `time` seconds).
233/// When no (or expired) cache, concurrent calls
234/// will synchronize (`sync_writes`) so the function
235/// is only executed once.
236#[once(time=10, option = true, sync_writes = true)]
237fn keyed(a: String) -> Option<usize> {
238 if a == "a" {
239 Some(a.len())
240 } else {
241 None
242 }
243}
244# pub fn main() { }
245```
246
247----
248
249```rust
250use std::thread::sleep;
251use std::time::Duration;
252use cached::proc_macro::cached;
253
254/// Use a timed cache with a TTL of 60s.
255/// Run a background thread to continuously refresh a specific key.
256#[cached(time = 60, key = "String", convert = r#"{ String::from(a) }"#)]
257fn keyed(a: &str) -> usize {
258 a.len()
259}
260pub fn main() {
261 let _handler = std::thread::spawn(|| {
262 loop {
263 sleep(Duration::from_secs(50));
264 // this method is generated by the `cached` macro
265 keyed_prime_cache("a");
266 }
267 });
268 // handler.join().unwrap();
269}
270```
271
272----
273
274```rust
275use std::thread::sleep;
276use std::time::Duration;
277use cached::proc_macro::once;
278
279/// Run a background thread to continuously refresh a singleton.
280#[once]
281fn keyed() -> String {
282 // do some long http request
283 "some data".to_string()
284}
285pub fn main() {
286 let _handler = std::thread::spawn(|| {
287 loop {
288 sleep(Duration::from_secs(60));
289 // this method is generated by the `cached` macro
290 keyed_prime_cache();
291 }
292 });
293 // handler.join().unwrap();
294}
295```
296
297----
298
299```rust
300use std::thread::sleep;
301use std::time::Duration;
302use cached::proc_macro::cached;
303
304/// Run a background thread to continuously refresh every key of a cache
305#[cached(key = "String", convert = r#"{ String::from(a) }"#)]
306fn keyed(a: &str) -> usize {
307 a.len()
308}
309pub fn main() {
310 let _handler = std::thread::spawn(|| {
311 loop {
312 sleep(Duration::from_secs(60));
313 let keys: Vec<String> = {
314 // note: the cache key is a tuple of all function arguments, unless the function takes a single argument
315 KEYED.lock().unwrap().get_store().keys().map(|k| k.clone()).collect()
316 };
317 for k in &keys {
318 // this method is generated by the `cached` macro
319 keyed_prime_cache(k);
320 }
321 }
322 });
323 // handler.join().unwrap();
324}
325```
326
327
328*/
329
330#[doc(inline)]
331pub use cached_proc_macro::{cached, io_cached, once};
332#[doc(inline)]
333pub use cached_proc_macro_types::Return;