//! Redis-backed API key repository.
//! (openzeppelin_relayer/repositories/api_key/api_key_redis.rs)

use crate::models::{ApiKeyRepoModel, PaginationQuery, RepositoryError};
use crate::repositories::redis_base::RedisRepository;
use crate::repositories::{ApiKeyRepositoryTrait, BatchRetrievalResult, PaginatedResult};
use crate::utils::{EncryptionContext, RedisConnections};
use async_trait::async_trait;
use redis::AsyncCommands;
use std::fmt;
use std::sync::Arc;
use tracing::{debug, error, warn};

const API_KEY_PREFIX: &str = "apikey";
const API_KEY_LIST_KEY: &str = "apikey_list";

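/// Redis-backed implementation of `ApiKeyRepositoryTrait`.
///
/// Storage layout (see the key helpers below):
/// - `{key_prefix}:apikey:{id}`  -> `ApiKeyRepoModel` serialized inside
///   `EncryptionContext::with_aad_sync`, with the full Redis key as the AAD context
/// - `{key_prefix}:apikey_list`  -> Redis set containing every API key ID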
#[derive(Clone)]
pub struct RedisApiKeyRepository {
    pub connections: Arc<RedisConnections>,
    pub key_prefix: String,
}

impl RedisRepository for RedisApiKeyRepository {}

impl RedisApiKeyRepository {
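    /// Creates a repository handle scoped to `key_prefix`; rejects an empty prefix.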
    pub fn new(
        connections: Arc<RedisConnections>,
        key_prefix: String,
    ) -> Result<Self, RepositoryError> {
        if key_prefix.is_empty() {
            return Err(RepositoryError::InvalidData(
                "Redis key prefix cannot be empty".to_string(),
            ));
        }

        Ok(Self {
            connections,
            key_prefix,
        })
    }

    fn api_key_key(&self, api_key_id: &str) -> String {
        format!("{}:{}:{}", self.key_prefix, API_KEY_PREFIX, api_key_id)
    }

    fn api_key_list_key(&self) -> String {
        format!("{}:{}", self.key_prefix, API_KEY_LIST_KEY)
    }

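    /// Batch-fetches API keys with a single `MGET`.
    ///
    /// IDs that are missing in Redis are only logged; IDs whose payload fails to
    /// deserialize are reported back in `failed_ids`.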
    async fn get_by_ids(
        &self,
        ids: &[String],
    ) -> Result<BatchRetrievalResult<ApiKeyRepoModel>, RepositoryError> {
        if ids.is_empty() {
            debug!("No api key IDs provided for batch fetch");
            return Ok(BatchRetrievalResult {
                results: vec![],
                failed_ids: vec![],
            });
        }

        let mut conn = self
            .get_connection(self.connections.reader(), "get_by_ids")
            .await?;
        let keys: Vec<String> = ids.iter().map(|id| self.api_key_key(id)).collect();

        let values: Vec<Option<String>> = conn
            .mget(&keys)
            .await
            .map_err(|e| self.map_redis_error(e, "batch_fetch_api_keys"))?;

        let mut api_keys = Vec::new();
        let mut failed_count = 0;
        let mut failed_ids = Vec::new();
        for (i, value) in values.into_iter().enumerate() {
            match value {
                Some(json) => {
                    // The full Redis key is used as the AAD context when deserializing.
                    let key = keys[i].clone();
                    match EncryptionContext::with_aad_sync(key, || {
                        self.deserialize_entity::<ApiKeyRepoModel>(&json, &ids[i], "apikey")
                    }) {
                        Ok(api_key) => api_keys.push(api_key),
                        Err(e) => {
                            failed_count += 1;
                            error!("Failed to deserialize api key {}: {}", ids[i], e);
                            failed_ids.push(ids[i].clone());
                        }
                    }
                }
                None => {
                    warn!("Api key {} not found in batch fetch", ids[i]);
                }
            }
        }

        if failed_count > 0 {
            warn!(
                "Failed to deserialize {} out of {} api keys in batch",
                failed_count,
                ids.len()
            );
        }

        Ok(BatchRetrievalResult {
            results: api_keys,
            failed_ids,
        })
    }
}

impl fmt::Debug for RedisApiKeyRepository {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "RedisApiKeyRepository {{ key_prefix: {} }}",
            self.key_prefix
        )
    }
}

#[async_trait]
impl ApiKeyRepositoryTrait for RedisApiKeyRepository {
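    /// Stores a new API key and registers its ID in the index set.
    ///
    /// The `SET` and the `SADD` run in one atomic pipeline; note that the preceding
    /// existence check is a separate round trip, so concurrent creates of the same
    /// ID can still race.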
    async fn create(&self, entity: ApiKeyRepoModel) -> Result<ApiKeyRepoModel, RepositoryError> {
        if entity.id.is_empty() {
            return Err(RepositoryError::InvalidData(
                "API Key ID cannot be empty".to_string(),
            ));
        }

        let key = self.api_key_key(&entity.id);
        let list_key = self.api_key_list_key();
        let json = EncryptionContext::with_aad_sync(key.clone(), || {
            self.serialize_entity(&entity, |a| &a.id, "apikey")
        })?;

        let mut conn = self
            .get_connection(self.connections.primary(), "create")
            .await?;

        let existing: Option<String> = conn
            .get(&key)
            .await
            .map_err(|e| self.map_redis_error(e, "create_api_key_check"))?;

        if existing.is_some() {
            return Err(RepositoryError::ConstraintViolation(format!(
                "API Key with ID {} already exists",
                entity.id
            )));
        }

        let mut pipe = redis::pipe();
        pipe.atomic();
        pipe.set(&key, json);
        pipe.sadd(&list_key, &entity.id);

        pipe.exec_async(&mut conn)
            .await
            .map_err(|e| self.map_redis_error(e, "create_api_key"))?;

        debug!("Successfully created API Key {}", entity.id);
        Ok(entity)
    }

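    /// Pages through API keys by loading all IDs via `SMEMBERS` and slicing in
    /// memory; note that Redis set ordering is unspecified, so page composition
    /// may vary between calls.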
    async fn list_paginated(
        &self,
        query: PaginationQuery,
    ) -> Result<PaginatedResult<ApiKeyRepoModel>, RepositoryError> {
        if query.page == 0 {
            return Err(RepositoryError::InvalidData(
                "Page number must be greater than 0".to_string(),
            ));
        }

        if query.per_page == 0 {
            return Err(RepositoryError::InvalidData(
                "Per page count must be greater than 0".to_string(),
            ));
        }

        let (total, ids_to_query) = {
            let mut conn = self
                .get_connection(self.connections.reader(), "list_paginated")
                .await?;
            let api_key_list_key = self.api_key_list_key();

            let total: u64 = conn
                .scard(&api_key_list_key)
                .await
                .map_err(|e| self.map_redis_error(e, "list_paginated_count"))?;

            if total == 0 {
                return Ok(PaginatedResult {
                    items: vec![],
                    total: 0,
                    page: query.page,
                    per_page: query.per_page,
                });
            }

            let all_ids: Vec<String> = conn
                .smembers(&api_key_list_key)
                .await
                .map_err(|e| self.map_redis_error(e, "list_paginated_members"))?;

            // Clamp the window so a page past the end yields an empty slice
            // instead of panicking on an out-of-range index.
            let start = (((query.page - 1) * query.per_page) as usize).min(all_ids.len());
            let end = (start + query.per_page as usize).min(all_ids.len());

            (total, all_ids[start..end].to_vec())
        };

        let items = self.get_by_ids(&ids_to_query).await?;

        Ok(PaginatedResult {
            items: items.results,
            total,
            page: query.page,
            per_page: query.per_page,
        })
    }

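    /// Fetches a single API key, returning `Ok(None)` when it does not exist.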
    async fn get_by_id(&self, id: &str) -> Result<Option<ApiKeyRepoModel>, RepositoryError> {
        if id.is_empty() {
            return Err(RepositoryError::InvalidData(
                "API Key ID cannot be empty".to_string(),
            ));
        }

        let mut conn = self
            .get_connection(self.connections.reader(), "get_by_id")
            .await?;
        let api_key_key = self.api_key_key(id);

        debug!("Fetching api key with ID: {}", id);

        let json: Option<String> = conn
            .get(&api_key_key)
            .await
            .map_err(|e| self.map_redis_error(e, "get_api_key_by_id"))?;

        match json {
            Some(json) => {
                debug!("Found api key with ID: {}", id);
                EncryptionContext::with_aad_sync(api_key_key, || {
                    self.deserialize_entity::<ApiKeyRepoModel>(&json, id, "apikey")
                })
                .map(Some)
            }
            None => {
                debug!("Api key with ID {} not found", id);
                Ok(None)
            }
        }
    }

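    /// Returns the permissions of an API key, or `NotFound` if the key is missing.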
    async fn list_permissions(&self, api_key_id: &str) -> Result<Vec<String>, RepositoryError> {
        let api_key = self.get_by_id(api_key_id).await?;
        match api_key {
            Some(api_key) => Ok(api_key.permissions),
            None => Err(RepositoryError::NotFound(format!(
                "Api key with ID {api_key_id} not found"
            ))),
        }
    }

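    /// Deletes an API key and removes its ID from the index set in one atomic
    /// pipeline; returns `NotFound` if the key does not exist.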
    async fn delete_by_id(&self, id: &str) -> Result<(), RepositoryError> {
        if id.is_empty() {
            return Err(RepositoryError::InvalidData(
                "API Key ID cannot be empty".to_string(),
            ));
        }

        let key = self.api_key_key(id);
        let api_key_list_key = self.api_key_list_key();
        let mut conn = self
            .get_connection(self.connections.primary(), "delete_by_id")
            .await?;

        debug!("Deleting api key with ID: {}", id);

        let existing: Option<String> = conn
            .get(&key)
            .await
            .map_err(|e| self.map_redis_error(e, "delete_api_key_check"))?;

        if existing.is_none() {
            return Err(RepositoryError::NotFound(format!(
                "Api key with ID {id} not found"
            )));
        }

        let mut pipe = redis::pipe();
        pipe.atomic();
        pipe.del(&key);
        pipe.srem(&api_key_list_key, id);

        pipe.exec_async(&mut conn)
            .await
            .map_err(|e| self.map_redis_error(e, "delete_api_key"))?;

        debug!("Successfully deleted api key {}", id);
        Ok(())
    }

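    /// Counts API keys via `SCARD` on the index set.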
    async fn count(&self) -> Result<usize, RepositoryError> {
        let mut conn = self
            .get_connection(self.connections.reader(), "count")
            .await?;
        let api_key_list_key = self.api_key_list_key();

        let count: u64 = conn
            .scard(&api_key_list_key)
            .await
            .map_err(|e| self.map_redis_error(e, "count_api_keys"))?;

        Ok(count as usize)
    }

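    /// Returns whether any API keys exist, based on `EXISTS` on the index set.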
    async fn has_entries(&self) -> Result<bool, RepositoryError> {
        let mut conn = self
            .get_connection(self.connections.reader(), "has_entries")
            .await?;
        let api_key_list_key = self.api_key_list_key();

        debug!("Checking if api key entries exist");

        let exists: bool = conn
            .exists(&api_key_list_key)
            .await
            .map_err(|e| self.map_redis_error(e, "has_entries_check"))?;

        debug!("Api key entries exist: {}", exists);
        Ok(exists)
    }

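    /// Deletes every stored API key and the index set itself in one atomic pipeline.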
    async fn drop_all_entries(&self) -> Result<(), RepositoryError> {
        let mut conn = self
            .get_connection(self.connections.primary(), "drop_all_entries")
            .await?;
        let api_key_list_key = self.api_key_list_key();

        debug!("Dropping all api key entries");

        let api_key_ids: Vec<String> = conn
            .smembers(&api_key_list_key)
            .await
            .map_err(|e| self.map_redis_error(e, "drop_all_entries_get_ids"))?;

        if api_key_ids.is_empty() {
            debug!("No api key entries to drop");
            return Ok(());
        }

        let mut pipe = redis::pipe();
        pipe.atomic();

        for api_key_id in &api_key_ids {
            let api_key_key = self.api_key_key(api_key_id);
            pipe.del(&api_key_key);
        }

        pipe.del(&api_key_list_key);

        pipe.exec_async(&mut conn)
            .await
            .map_err(|e| self.map_redis_error(e, "drop_all_entries_pipeline"))?;

        debug!("Dropped {} api key entries", api_key_ids.len());
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use crate::models::SecretString;

    use super::*;
    use chrono::Utc;

    fn create_test_api_key(id: &str) -> ApiKeyRepoModel {
        ApiKeyRepoModel {
            id: id.to_string(),
            value: SecretString::new("test-value"),
            name: "test-name".to_string(),
            allowed_origins: vec!["*".to_string()],
            permissions: vec!["relayer:all:execute".to_string()],
            created_at: Utc::now().to_string(),
        }
    }

    async fn setup_test_repo() -> RedisApiKeyRepository {
        let redis_url =
            std::env::var("REDIS_URL").unwrap_or_else(|_| "redis://127.0.0.1:6379/".to_string());
        let cfg = deadpool_redis::Config::from_url(&redis_url);
        let pool = Arc::new(
            cfg.builder()
                .expect("Failed to create pool builder")
                .max_size(16)
                .runtime(deadpool_redis::Runtime::Tokio1)
                .build()
                .expect("Failed to build Redis pool"),
        );
        let connections = Arc::new(RedisConnections::new_single_pool(pool));

        // A random prefix isolates each test run's keys in the shared Redis instance.
        let random_id = uuid::Uuid::new_v4().to_string();
        let key_prefix = format!("test_prefix:{random_id}");

        RedisApiKeyRepository::new(connections, key_prefix)
            .expect("Failed to create Redis api key repository")
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_new_repository_creation() {
        let repo = setup_test_repo().await;
        assert!(repo.key_prefix.contains("test_prefix"));
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_new_repository_empty_prefix_fails() {
        let redis_url = "redis://127.0.0.1:6379/";
        let cfg = deadpool_redis::Config::from_url(redis_url);
        let pool = Arc::new(
            cfg.builder()
                .expect("Failed to create pool builder")
                .max_size(16)
                .runtime(deadpool_redis::Runtime::Tokio1)
                .build()
                .expect("Failed to build Redis pool"),
        );
        let connections = Arc::new(RedisConnections::new_single_pool(pool));

        let result = RedisApiKeyRepository::new(connections, "".to_string());
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("key prefix cannot be empty"));
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_key_generation() {
        let repo = setup_test_repo().await;

        let api_key_key = repo.api_key_key("test-api-key");
        assert!(api_key_key.contains(":apikey:test-api-key"));

        let list_key = repo.api_key_list_key();
        assert!(list_key.contains(":apikey_list"));
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_serialize_deserialize_api_key() {
        let repo = setup_test_repo().await;
        let api_key = create_test_api_key("test-api-key");

        let json = repo
            .serialize_entity(&api_key, |a| &a.id, "apikey")
            .unwrap();
        let deserialized: ApiKeyRepoModel = repo
            .deserialize_entity(&json, &api_key.id, "apikey")
            .unwrap();

        assert_eq!(api_key.id, deserialized.id);
        assert_eq!(api_key.value, deserialized.value);
        assert_eq!(api_key.name, deserialized.name);
        assert_eq!(api_key.allowed_origins, deserialized.allowed_origins);
        assert_eq!(api_key.permissions, deserialized.permissions);
        assert_eq!(api_key.created_at, deserialized.created_at);
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_create_api_key() {
        let repo = setup_test_repo().await;
        let api_key_id = uuid::Uuid::new_v4().to_string();
        let api_key = create_test_api_key(&api_key_id);

        let result = repo.create(api_key.clone()).await;
        assert!(result.is_ok());

        let retrieved = repo.get_by_id(&api_key_id).await.unwrap();
        assert!(retrieved.is_some());
        let retrieved = retrieved.unwrap();
        assert_eq!(retrieved.id, api_key.id);
        assert_eq!(retrieved.value, api_key.value);
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_get_nonexistent_api_key() {
        let repo = setup_test_repo().await;

        let result = repo.get_by_id("nonexistent-api-key").await;
        assert!(matches!(result, Ok(None)));
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_error_handling_empty_id() {
        let repo = setup_test_repo().await;

        let result = repo.get_by_id("").await;
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("ID cannot be empty"));
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_get_by_ids_api_keys() {
        let repo = setup_test_repo().await;
        let api_key_id1 = uuid::Uuid::new_v4().to_string();
        let api_key_id2 = uuid::Uuid::new_v4().to_string();
        let api_key1 = create_test_api_key(&api_key_id1);
        let api_key2 = create_test_api_key(&api_key_id2);

        repo.create(api_key1.clone()).await.unwrap();
        repo.create(api_key2.clone()).await.unwrap();

        let retrieved = repo
            .get_by_ids(&[api_key1.id.clone(), api_key2.id.clone()])
            .await
            .unwrap();
        assert_eq!(retrieved.results.len(), 2);
        assert_eq!(retrieved.results[0].id, api_key1.id);
        assert_eq!(retrieved.results[1].id, api_key2.id);
        assert_eq!(retrieved.failed_ids.len(), 0);
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_list_paginated_api_keys() {
        let repo = setup_test_repo().await;

        let api_key_id1 = uuid::Uuid::new_v4().to_string();
        let api_key_id2 = uuid::Uuid::new_v4().to_string();
        let api_key_id3 = uuid::Uuid::new_v4().to_string();
        let api_key1 = create_test_api_key(&api_key_id1);
        let api_key2 = create_test_api_key(&api_key_id2);
        let api_key3 = create_test_api_key(&api_key_id3);

        repo.create(api_key1.clone()).await.unwrap();
        repo.create(api_key2.clone()).await.unwrap();
        repo.create(api_key3.clone()).await.unwrap();

        let query = PaginationQuery {
            page: 1,
            per_page: 2,
        };

        let result = repo.list_paginated(query).await;
        assert!(result.is_ok());
        let result = result.unwrap();
        println!("result: {result:?}");
        assert_eq!(result.items.len(), 2);
    }

    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_has_entries() {
        let repo = setup_test_repo().await;
        assert!(!repo.has_entries().await.unwrap());
        repo.create(create_test_api_key("test-api-key"))
            .await
            .unwrap();
        assert!(repo.has_entries().await.unwrap());
        repo.drop_all_entries().await.unwrap();
        assert!(!repo.has_entries().await.unwrap());
    }
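
    // Additional test sketch (not in the original file): a delete round trip using
    // only the repository APIs exercised above; assumes the same live Redis
    // instance as the other ignored tests.
    #[tokio::test]
    #[ignore = "Requires active Redis instance"]
    async fn test_delete_api_key() {
        let repo = setup_test_repo().await;
        let api_key_id = uuid::Uuid::new_v4().to_string();
        repo.create(create_test_api_key(&api_key_id)).await.unwrap();
        assert_eq!(repo.count().await.unwrap(), 1);

        // Deleting an existing key removes both the entity and its index entry.
        repo.delete_by_id(&api_key_id).await.unwrap();
        assert!(repo.get_by_id(&api_key_id).await.unwrap().is_none());
        assert_eq!(repo.count().await.unwrap(), 0);

        // Deleting a missing key reports an error (NotFound).
        assert!(repo.delete_by_id(&api_key_id).await.is_err());
    }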
}