diff --git a/Cargo.lock b/Cargo.lock
index 370a093..e2cebb1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -66,6 +66,12 @@ version = "1.0.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 
+[[package]]
+name = "arc-swap"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+
 [[package]]
 name = "async-lock"
 version = "3.4.0"
@@ -299,6 +305,20 @@ dependencies = [
  "tokio",
 ]
 
+[[package]]
+name = "combine"
+version = "4.6.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "memchr",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+]
+
 [[package]]
 name = "concurrent-queue"
 version = "2.5.0"
@@ -464,6 +484,36 @@ dependencies = [
  "syn 2.0.77",
 ]
 
+[[package]]
+name = "deadpool"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed"
+dependencies = [
+ "deadpool-runtime",
+ "num_cpus",
+ "tokio",
+]
+
+[[package]]
+name = "deadpool-redis"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfae6799b68a735270e4344ee3e834365f707c72da09c9a8bb89b45cc3351395"
+dependencies = [
+ "deadpool",
+ "redis",
+]
+
+[[package]]
+name = "deadpool-runtime"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b"
+dependencies = [
+ "tokio",
+]
+
 [[package]]
 name = "der"
 version = "0.7.9"
@@ -1114,6 +1164,7 @@ dependencies = [
  "axum",
  "axum-extra",
  "chrono",
+ "deadpool-redis",
  "domain",
  "futures-util",
  "hex",
@@ -1129,6 +1180,7 @@ dependencies = [
  "macro",
  "moka",
  "once_cell",
+ "redis",
  "reqwest",
  "serde",
  "serde_json",
@@ -1394,6 +1446,16 @@ dependencies = [
  "libm",
 ]
 
+[[package]]
+name = "num_cpus"
+version = "1.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
 [[package]]
 name = "object"
 version = "0.36.2"
@@ -1689,6 +1751,31 @@ dependencies = [
  "bitflags 2.6.0",
 ]
 
+[[package]]
+name = "redis"
+version = "0.27.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc6baebe319ef5e4b470f248335620098d1c2e9261e995be05f56f719ca4bdb2"
+dependencies = [
+ "arc-swap",
+ "async-trait",
+ "bytes",
+ "combine",
+ "futures-util",
+ "itoa",
+ "num-bigint",
+ "percent-encoding",
+ "pin-project-lite",
+ "ryu",
+ "serde",
+ "serde_json",
+ "sha1_smol",
+ "socket2",
+ "tokio",
+ "tokio-util",
+ "url",
+]
+
 [[package]]
 name = "redox_syscall"
 version = "0.4.1"
@@ -1981,18 +2068,18 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
 
 [[package]]
 name = "serde"
-version = "1.0.204"
+version = "1.0.210"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
+checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.204"
+version = "1.0.210"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
"e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", @@ -2001,9 +2088,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.122" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "itoa", "memchr", @@ -2078,6 +2165,12 @@ dependencies = [ "digest", ] +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + [[package]] name = "sha2" version = "0.10.8" diff --git a/Cargo.toml b/Cargo.toml index 62a92d2..3c8b1b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,4 +57,6 @@ tower = "0.4.13" csv = "1.3.0" strum = "0.26.3" strum_macros = "0.26.3" -hex = "0.4.3" \ No newline at end of file +hex = "0.4.3" +redis = "0.27.4" +deadpool-redis = "0.18.0" \ No newline at end of file diff --git a/app.toml b/app.toml index 42067b0..346ee11 100644 --- a/app.toml +++ b/app.toml @@ -19,6 +19,11 @@ refresh_token_secret = "chuanyue" expires = 1800 refresh_expires = 3600 +[redis] +url = "47.95.198.7:33000" +password = "3aB7kRt9pDf1nQzW" +db = 0 + [social.wechat] app_id = "wxf0547aa24593a446" app_secret = "e4a29192f9220d4d8c9eab37f8385c37" diff --git a/domain/src/entities/account.rs b/domain/src/entities/account.rs index 5c5277a..1a220bd 100644 --- a/domain/src/entities/account.rs +++ b/domain/src/entities/account.rs @@ -1,10 +1,11 @@ use std::fmt::Display; use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; use sqlx::types::{chrono, JsonValue}; use sqlx::{Error, PgPool, Postgres, Transaction}; -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub enum Role { Admin, User, @@ -51,7 +52,7 @@ impl Into for String { } } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct Account { pub id: String, pub username: String, diff --git a/i18n/src/lib.rs b/i18n/src/lib.rs index 16912ef..61f54d0 100644 --- a/i18n/src/lib.rs +++ b/i18n/src/lib.rs @@ -46,7 +46,7 @@ fn init() -> HashMap<&'static str, HashMap<&'static str, &'static str>> { i18n_map } -pub fn init_i18n() -> &'static HashMap<&'static str, HashMap<&'static str, &'static str>> { +fn init_i18n() -> &'static HashMap<&'static str, HashMap<&'static str, &'static str>> { I18N.get_or_init(init) } diff --git a/library/Cargo.toml b/library/Cargo.toml index 356db0e..eed4f9f 100644 --- a/library/Cargo.toml +++ b/library/Cargo.toml @@ -21,7 +21,7 @@ serde_json = { workspace = true } http = { workspace = true } http-body = { workspace = true } http-body-util = { workspace = true } -tokio = { workspace = true, features = ["rt-multi-thread", "macros" ] } +tokio = { workspace = true, features = ["full"] } futures-util = { workspace = true } jsonwebtoken = { workspace = true } reqwest = { workspace = true, features = ["blocking", "json"] } @@ -36,6 +36,8 @@ tower-http = { workspace = true, features = ["trace"] } tower = { workspace = true } hyper = { workspace = true } hex = { workspace = true } +redis = { workspace = true, features = ["tokio-comp", "json"] } +deadpool-redis = { workspace = true } domain = { path = "../domain" } i18n = { path = "../i18n" } diff --git 
diff --git a/library/src/cache.rs b/library/src/cache.rs
new file mode 100644
index 0000000..910f9a8
--- /dev/null
+++ b/library/src/cache.rs
@@ -0,0 +1,248 @@
+use std::{fmt::Debug, sync::Arc, time::Duration};
+
+use domain::entities::account::Account;
+use lazy_static::lazy_static;
+use library::config;
+use moka::{
+    future::{Cache, CacheBuilder}, notification::RemovalCause, policy::EvictionPolicy
+};
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+
+use crate::core::redis::REDIS_CONN;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct CacheAccount {
+    pub account: Arc<Account>,
+    pub token: Arc<String>,
+}
+
+pub struct ServerCache<S, D> {
+    pub cache: Cache<S, D>,
+}
+
+impl<S, D> Default for ServerCache<S, D>
+where
+    S: std::hash::Hash + Eq + Debug + ToString + Send + Sync + 'static,
+    D: Debug + Send + Sync + Clone + Serialize + DeserializeOwned + 'static,
+{
+    fn default() -> Self {
+        ServerCache {
+            cache: CacheBuilder::new(20480)
+                .name("login_cache")
+                .eviction_policy(EvictionPolicy::lru())
+                .time_to_live(Duration::from_secs(config!().jwt.expires as u64))
+                .async_eviction_listener(|key: Arc<S>, value: D, cause| Box::pin(async move {
+                    tracing::info!(
+                        "in-memory cache entry removed key: {:?}, value: {:?}, cause: {:?}",
+                        key,
+                        value,
+                        cause
+                    );
+                    if cause != RemovalCause::Replaced {
+                        RedisCache {
+                            cache_type: std::any::type_name::<D>().to_string(),
+                            cache_key: key.to_string(),
+                            ..Default::default()
+                        }.remove().await;
+                    }
+                }))
+                .build(),
+        }
+    }
+}
+
+impl<D> ServerCache<String, D>
+where
+    D: Debug + Send + Sync + Clone + Serialize + DeserializeOwned + 'static,
+{
+    pub async fn init(&self) {
+        let cache_type = std::any::type_name::<D>().to_string();
+        let cache_datas = RedisCache::get(cache_type).await;
+        for element in cache_datas {
+            let cache_key = element.cache_key.clone();
+            let data_json = element.data;
+            let cache_data = serde_json::from_str::<D>(&data_json).unwrap();
+            self.add_cache(cache_key, cache_data).await;
+        }
+    }
+
+    async fn add_cache(&self, key: String, value: D) {
+        self.cache.insert(key, value).await;
+    }
+
+    pub async fn insert(&self, key: String, value: D) {
+        let cache_data = RedisCache {
+            cache_type: std::any::type_name::<D>().to_string(),
+            data: serde_json::to_string(&value).unwrap(),
+            cache_key: key.to_string(),
+            ..Default::default()
+        };
+        cache_data.insert().await;
+        self.cache.insert(key, value).await;
+    }
+
+    pub async fn get(&self, key: &str) -> Option<D> {
+        self.cache.get(key).await
+    }
+
+    pub async fn remove(&self, key: &str) {
+        self.cache.remove(key).await;
+    }
+}
+
+#[derive(Debug, Clone, Default)]
+struct RedisCache {
+    pub cache_type: String,
+    pub cache_key: String,
+    pub data: String,
+}
+
+impl RedisCache {
+    pub async fn insert(&self) {
+        match REDIS_CONN.hset(&self.cache_type, &self.cache_key, &self.data).await {
+            Ok(_) => {},
+            Err(err) => {
+                tracing::error!("failed to insert data into the cache hash: {:?}", err);
+            }
+        }
+    }
+
+    pub async fn get(cache_type: String) -> Vec<RedisCache> {
+        let mut result = Vec::new();
+        match REDIS_CONN.hgetall(&cache_type).await {
+            Ok(cache_datas) => {
+                for (key, value) in cache_datas {
+                    let cache_data = RedisCache {
+                        cache_type: cache_type.clone(),
+                        cache_key: key,
+                        data: value,
+                    };
+                    result.push(cache_data);
+                }
+            },
+            Err(err) => {
+                tracing::error!("failed to read data from the cache hash: {:?}", err);
+            },
+        }
+
+        result
+    }
+
+    pub async fn remove(&self) {
+        match REDIS_CONN.hdel(&self.cache_type, &self.cache_key).await {
+            Ok(_) => {},
+            Err(err) => {
+                tracing::error!("failed to delete data from the cache hash: {:?}", err);
+            }
+        }
+    }
+}
+
+// impl CacheData {
+//     /// Insert data into the cache data table.
+//     ///
+//     /// First check whether the data already exists; if it does, delete it.
+//     ///
+//     /// Then save the new cache data.
+//     pub async fn insert(&self) {
+//         let mut transaction = db!().begin().await.unwrap();
+//         let exist_cache_data = match sqlx::query_as!(
+//             CacheData,
+//             r#"select * from cache_data where cache_key = $1"#,
+//             self.cache_key
+//         ).fetch_all(&mut *transaction).await {
+//             Ok(data) => data,
+//             Err(err) => {
+//                 tracing::error!("failed to fetch cache data from the database: {:?}", err);
+//                 tracing::error!("{}", err);
+//                 Vec::new()
+//             }
+//         };
+//         for ele in exist_cache_data {
+//             match sqlx::query!(
+//                 r#"
+//                 delete from cache_data where id = $1
+//                 "#,
+//                 ele.id
+//             )
+//             .execute(&mut *transaction)
+//             .await {
+//                 Ok(_) => {},
+//                 Err(err) => {
+//                     tracing::error!("failed to delete cache data from the database: {:?}", err);
+//                     tracing::error!("{}", err);
+//                     transaction.rollback().await.unwrap();
+//                     return;
+//                 }
+//             }
+//         }
+
+//         match sqlx::query!(
+//             r#"
+//             insert into cache_data (cache_type, cache_key, data)
+//             values ($1, $2, $3)
+//             "#,
+//             self.cache_type,
+//             self.cache_key,
+//             self.data
+//         )
+//         .execute(&mut *transaction)
+//         .await
+//         {
+//             Ok(_) => transaction.commit().await.unwrap(),
+//             Err(err) => {
+//                 tracing::error!("failed to insert cache data into the database: {:?}", err);
+//                 transaction.rollback().await.unwrap()
+//             },
+//         }
+//     }
+
+//     pub async fn get(cache_type: String) -> Vec<CacheData> {
+//         let mut cache_data = Vec::new();
+//         match sqlx::query_as!(
+//             CacheData,
+//             r#"
+//             select id, cache_type, cache_key, data from cache_data where cache_type = $1
+//             "#,
+//             cache_type
+//         )
+//         .fetch_all(db!())
+//         .await
+//         {
+//             Ok(data) => {
+//                 cache_data = data;
+//             }
+//             Err(err) => {
+//                 tracing::error!("failed to fetch cache data from the database: {:?}", err);
+//                 tracing::error!("{}", err);
+//             }
+//         }
+//         cache_data
+//     }
+
+//     pub async fn remove(cache_key: String) {
+//         match sqlx::query!(
+//             r#"
+//             delete from cache_data where cache_key = $1
+//             "#,
+//             cache_key
+//         )
+//         .execute(db!())
+//         .await
+//         {
+//             Ok(_) => {},
+//             Err(err) => {
+//                 tracing::error!("failed to delete cache data from the database: {:?}", err);
+//                 tracing::error!("{}", err);
+//             }
+//         }
+//     }
+// }
+
+pub async fn init_cache() {
+    LOGIN_ACCOUNT_CACHE.init().await;
+}
+
+lazy_static! {
+    pub static ref LOGIN_ACCOUNT_CACHE: ServerCache<String, Arc<CacheAccount>> = ServerCache::default();
+}
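Note: for context on how the new write-through cache in `library/src/cache.rs` is meant to be used, here is a minimal sketch. `insert` writes to the moka cache and mirrors the JSON-serialized value into a Redis hash keyed by the value's type name; `init` re-hydrates moka from that hash at startup; `remove` drops the moka entry and relies on the removal notification to clear the Redis copy. The `SESSION_CACHE` name and the `Session` type below are hypothetical and not part of this change; the import path assumes another crate in this workspace.

```rust
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};

use library::cache::ServerCache;

// Hypothetical cached value; any Debug + Clone + Serialize + DeserializeOwned type satisfies the bounds.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct Session {
    account_id: String,
    token: String,
}

lazy_static! {
    // Hypothetical second cache built on the ServerCache added in this diff.
    static ref SESSION_CACHE: ServerCache<String, Session> = ServerCache::default();
}

async fn session_cache_example() {
    // Warm the in-memory cache from the Redis hash (normally done once at startup).
    SESSION_CACHE.init().await;

    // Write-through: stored in moka and mirrored into Redis via HSET.
    SESSION_CACHE
        .insert(
            "account-1".to_string(),
            Session { account_id: "account-1".into(), token: "jwt...".into() },
        )
        .await;

    // Reads are served from the in-memory cache only.
    assert!(SESSION_CACHE.get("account-1").await.is_some());

    // Removal drops the moka entry; the eviction listener then deletes the Redis copy.
    SESSION_CACHE.remove("account-1").await;
}
```

One design consequence worth noting: every `ServerCache` built through `Default` shares the `"login_cache"` builder name and a TTL tied to `jwt.expires`, so a second instance like the sketch above would inherit the login cache's expiry policy.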
diff --git a/library/src/cache/account_cache.rs b/library/src/cache/account_cache.rs
deleted file mode 100644
index 20d9f63..0000000
--- a/library/src/cache/account_cache.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-use std::{sync::Arc, time::Duration};
-
-use domain::entities::account::Account;
-use lazy_static::lazy_static;
-use library::config;
-use moka::{
-    future::{Cache, CacheBuilder},
-    policy::EvictionPolicy,
-};
-
-#[derive(Debug, Clone)]
-pub struct CacheAccount {
-    pub account: Arc<Account>,
-    pub token: Arc<String>,
-}
-
-lazy_static! {
-    pub static ref LOGIN_CACHE: Cache<String, Arc<CacheAccount>> = {
-        CacheBuilder::new(20480)
-            .name("login_cache")
-            .eviction_policy(EvictionPolicy::lru())
-            .time_to_live(Duration::from_secs(config!().jwt.expires as u64))
-            .eviction_listener(|key, value, cause| {
-                tracing::info!("login_cache evict key: {:?}, value: {:?}, cause: {:?}", key, value, cause);
-            })
-            .build()
-    };
-}
diff --git a/library/src/cache/mod.rs b/library/src/cache/mod.rs
deleted file mode 100644
index 2585d14..0000000
--- a/library/src/cache/mod.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod account_cache;
\ No newline at end of file
diff --git a/library/src/core/config.rs b/library/src/core/config.rs
index 44d2113..b50ce95 100644
--- a/library/src/core/config.rs
+++ b/library/src/core/config.rs
@@ -8,6 +8,7 @@ pub struct Config {
     pub server: Server,
     pub logger: Logger,
     pub database: Database,
+    pub redis: Redis,
     pub jwt: Jwt,
     pub social: Social,
 }
@@ -51,6 +52,13 @@ pub struct Jwt {
     pub refresh_expires: i64,
 }
 
+#[derive(Clone, Debug, Deserialize)]
+pub struct Redis {
+    pub url: String,
+    pub password: String,
+    pub db: i64,
+}
+
 #[derive(Clone, Debug, Deserialize)]
 pub struct Social {
     pub wechat: Wechat,
diff --git a/library/src/core/db.rs b/library/src/core/db.rs
index fd28caf..fca28d4 100644
--- a/library/src/core/db.rs
+++ b/library/src/core/db.rs
@@ -1,4 +1,4 @@
-use crate::core::config::Config;
+use crate::config;
 use std::sync::OnceLock;
 use std::time::Duration;
 use sqlx_postgres::{PgPool, PgPoolOptions};
@@ -6,7 +6,8 @@ use sqlx_postgres::{PgPool, PgPoolOptions};
 static DB: OnceLock<PgPool> = OnceLock::new();
 
 /// Initialize the database connection pool
-pub async fn init_database(config: &Config) {
+pub async fn init_database() {
+    let config = config!();
     let db_cfg = &config.database;
 
     let pool = PgPoolOptions::new()
diff --git a/library/src/core/logger.rs b/library/src/core/logger.rs
index 2e9ceb9..ed198f5 100644
--- a/library/src/core/logger.rs
+++ b/library/src/core/logger.rs
@@ -1,4 +1,4 @@
-use crate::core::config::Config;
+use crate::config;
 use chrono::Local;
 use tracing::Level;
 use tracing_appender::non_blocking::WorkerGuard;
@@ -19,7 +19,8 @@ impl FormatTime for LocalTimer {
 }
 
 /// Initialize logging
-pub fn init_log(config: &Config) -> (WorkerGuard, WorkerGuard) {
+pub fn init_log() -> (WorkerGuard, WorkerGuard) {
+    let config = config!();
     let logger_cfg = config.logger.clone();
     let (stdout_tracing_appender, std_guard) = tracing_appender::non_blocking(std::io::stdout());
     let (file_tracing_appender, file_guard) = tracing_appender::non_blocking(
diff --git a/library/src/core/mod.rs b/library/src/core/mod.rs
index 170293b..5cc2f1b 100644
--- a/library/src/core/mod.rs
+++ b/library/src/core/mod.rs
@@ -1,3 +1,4 @@
 pub mod config;
 pub mod logger;
 pub mod db;
+pub mod redis;
\ No newline at end of file
diff --git a/library/src/core/redis.rs b/library/src/core/redis.rs
new file mode 100644
index 0000000..d6b5642
--- /dev/null
+++ b/library/src/core/redis.rs
@@ -0,0 +1,180 @@
+use deadpool_redis::{Config, Connection, Pool};
+use lazy_static::lazy_static;
+use redis::Cmd;
+
+use crate::{config, model::response::{ResErr, ResResult}};
+
+pub struct RedisConnManager {
+    pool: Pool
+}
+
+impl Default for RedisConnManager {
+    fn default() -> Self {
+        let redis = &config!().redis;
+        let url = format!("redis://:{}@{}/{}", redis.password, redis.url, redis.db);
+        // tracing::info!("redis url: {}", url);
+        let cfg = Config::from_url(url);
+        let pool = cfg.create_pool(Some(deadpool_redis::Runtime::Tokio1)).expect("failed to create the redis connection pool");
+        Self { pool }
+    }
+}
+
+impl RedisConnManager {
+    pub async fn set(&self, key: &str, value: &str) -> ResResult<()> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::set(key, value).exec_async(&mut conn).await {
+            Ok(_) => Ok(()),
+            Err(err) => {
+                tracing::error!("redis set key:{} value:{} error:{}", key, value, err);
+                Err(ResErr::service("failed to execute the redis SET command"))
+            },
+        }
+    }
+
+    pub async fn setexp(&self, key: &str, value: &str, seconds: u64) -> ResResult<()> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::set_ex(key, value, seconds).exec_async(&mut conn).await {
+            Ok(_) => Ok(()),
+            Err(err) => {
+                tracing::error!("redis setex key:{} value:{} seconds:{} error:{}", key, value, seconds, err);
+                Err(ResErr::service("failed to execute the redis SETEX command"))
+            },
+        }
+    }
+
+    pub async fn get(&self, key: &str) -> ResResult<String> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::get(key).query_async::<String>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis get key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis GET command"))
+            },
+        }
+    }
+
+    pub async fn del(&self, key: &str) -> ResResult<()> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::del(key).query_async::<i64>(&mut conn).await {
+            Ok(_) => Ok(()),
+            Err(err) => {
+                tracing::error!("redis del key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis DEL command"))
+            },
+        }
+    }
+
+    pub async fn hset(&self, key: &str, field: &str, value: &str) -> ResResult<()> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hset(key, field, value).query_async::<i64>(&mut conn).await {
+            Ok(_) => Ok(()),
+            Err(err) => {
+                tracing::error!("redis hset key:{} field:{} value:{} error:{}", key, field, value, err);
+                Err(ResErr::service("failed to execute the redis HSET command"))
+            },
+        }
+    }
+
+    pub async fn hget(&self, key: &str, field: &str) -> ResResult<String> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hget(key, field).query_async::<String>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis hget key:{} field:{} error:{}", key, field, err);
+                Err(ResErr::service("failed to execute the redis HGET command"))
+            },
+        }
+    }
+
+    pub async fn hdel(&self, key: &str, field: &str) -> ResResult<()> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hdel(key, field).query_async::<i64>(&mut conn).await {
+            Ok(_) => Ok(()),
+            Err(err) => {
+                tracing::error!("redis hdel key:{} field:{} error:{}", key, field, err);
+                Err(ResErr::service("failed to execute the redis HDEL command"))
+            },
+        }
+    }
+
+    pub async fn hgetall(&self, key: &str) -> ResResult<Vec<(String, String)>> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hgetall(key).query_async::<Vec<(String, String)>>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis hgetall key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis HGETALL command"))
+            },
+        }
+    }
+
+    pub async fn hkeys(&self, key: &str) -> ResResult<Vec<String>> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hkeys(key).query_async::<Vec<String>>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis hkeys key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis HKEYS command"))
+            },
+        }
+    }
+
+    pub async fn hvals(&self, key: &str) -> ResResult<Vec<String>> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hvals(key).query_async::<Vec<String>>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis hvals key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis HVALS command"))
+            },
+        }
+    }
+
+    pub async fn hlen(&self, key: &str) -> ResResult<i64> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::hlen(key).query_async::<i64>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis hlen key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis HLEN command"))
+            },
+        }
+    }
+
+    pub async fn expire(&self, key: &str, seconds: i64) -> ResResult<()> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::expire(key, seconds).query_async::<i64>(&mut conn).await {
+            Ok(_) => Ok(()),
+            Err(err) => {
+                tracing::error!("redis expire key:{} seconds:{} error:{}", key, seconds, err);
+                Err(ResErr::service("failed to execute the redis EXPIRE command"))
+            },
+        }
+    }
+
+    pub async fn ttl(&self, key: &str) -> ResResult<i64> {
+        let mut conn = self.get_conn().await.unwrap();
+        match Cmd::ttl(key).query_async::<i64>(&mut conn).await {
+            Ok(result) => Ok(result),
+            Err(err) => {
+                tracing::error!("redis ttl key:{} error:{}", key, err);
+                Err(ResErr::service("failed to execute the redis TTL command"))
+            },
+        }
+    }
+
+    #[inline]
+    async fn get_conn(&self) -> ResResult<Connection> {
+        match self.pool.get().await {
+            Ok(conn) => Ok(conn),
+            Err(err) => {
+                tracing::error!("redis get_conn error:{}", err);
+                Err(ResErr::service("failed to get a connection from the redis pool"))
+            },
+        }
+    }
+}
+
+lazy_static! {
+    pub static ref REDIS_CONN: RedisConnManager = RedisConnManager::default();
+}
\ No newline at end of file
diff --git a/library/src/middleware/req_ctx.rs b/library/src/middleware/req_ctx.rs
index cf2949f..c679405 100644
--- a/library/src/middleware/req_ctx.rs
+++ b/library/src/middleware/req_ctx.rs
@@ -5,7 +5,7 @@ use http::header;
 use i18n::{message, message_ids::MessageId};
 use jsonwebtoken::{decode, DecodingKey, Validation};
 
-use crate::{cache::account_cache::LOGIN_CACHE, config, context::Context, model::response::ResErr, token::Claims, utils::request_util};
+use crate::{cache::LOGIN_ACCOUNT_CACHE, config, context::Context, model::response::ResErr, token::Claims, utils::request_util};
 
 const WHITE_LIST: &[(&str, &str)] = &[
     ("POST", "/account/sys"),
@@ -56,7 +56,7 @@ pub async fn authenticate_ctx(mut req: Request, next: Next) -> Response {
     match decode::<Claims>(token, &DecodingKey::from_secret(config!().jwt.token_secret.as_bytes()), &validation) {
         Ok(decoded) => {
             // Fetch the current user from the cache
-            let account = LOGIN_CACHE.get(&decoded.claims.sub).await;
+            let account = LOGIN_ACCOUNT_CACHE.get(&decoded.claims.sub).await;
             if account.is_none() {
                 tracing::error!("invalid token");
                 // Resolve the language
diff --git a/server/src/service/account_service.rs b/server/src/service/account_service.rs
index 0349354..ce46eb6 100644
--- a/server/src/service/account_service.rs
+++ b/server/src/service/account_service.rs
@@ -6,7 +6,7 @@ use domain::entities::account::Account;
 use domain::vo::account::{LoginAccount, RefreshTokenResult};
 use i18n::message;
 use i18n::message_ids::MessageId;
-use library::cache::account_cache::{CacheAccount, LOGIN_CACHE};
+use library::cache::{CacheAccount, LOGIN_ACCOUNT_CACHE};
 use library::context::Context;
 use library::model::response::ResErr::ErrPerm;
 use library::model::response::{ResErr, ResResult};
@@ -73,7 +73,7 @@ pub async fn authenticate_google(
     let token = token::generate_token(&account.id);
     let refresh_token = token::generate_refresh_token(&account.id);
 
-    LOGIN_CACHE
+    LOGIN_ACCOUNT_CACHE
         .insert(
             account.id.to_owned(),
             Arc::new(CacheAccount {
@@ -115,8 +115,8 @@ pub async fn refresh_token(
         refresh_token: generate_refresh_token(&account.id),
     };
 
-    LOGIN_CACHE.remove(&account.id).await;
-    LOGIN_CACHE
+    LOGIN_ACCOUNT_CACHE.remove(&account.id).await;
+    LOGIN_ACCOUNT_CACHE
         .insert(
             account.id.to_owned(),
             Arc::new(CacheAccount {
diff --git a/server/src/service/sys_account_service.rs b/server/src/service/sys_account_service.rs
index 5bcd310..330eea0 100644
--- a/server/src/service/sys_account_service.rs
+++ b/server/src/service/sys_account_service.rs
@@ -11,7 +11,7 @@ use i18n::{
     message_ids::MessageId,
 };
 use library::{
-    cache::account_cache::{CacheAccount, LOGIN_CACHE}, context::Context, db, model::response::{ResErr, ResResult}, token::{generate_refresh_token, generate_token}
+    cache::{CacheAccount, LOGIN_ACCOUNT_CACHE}, context::Context, db, model::response::{ResErr, ResResult}, token::{generate_refresh_token, generate_token}
 };
 
 /// Log in with account name and password
@@ -49,7 +49,7 @@ pub async fn authenticate_with_password(
     let token = generate_token(&account.id);
     let refresh_token = generate_refresh_token(&account.id);
 
-    LOGIN_CACHE
+    LOGIN_ACCOUNT_CACHE
         .insert(
             account.id.to_owned(),
             Arc::new(CacheAccount {
diff --git a/src/main.rs b/src/main.rs
index d4b8fdf..739adbc 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,4 +1,3 @@
-use library::config;
 use mimalloc::MiMalloc;
 
 #[global_allocator]
@@ -6,9 +5,10 @@ static GLOBAL: MiMalloc = MiMalloc;
 
 #[tokio::main]
 async fn main() {
-    let (_std_guard, _file_guard) = library::core::logger::init_log(config!());
-    let _i18n = i18n::init_i18n();
-    library::core::db::init_database(config!()).await;
+    // Initialize logging, the database connection pool, and the in-memory cache; the initialization order must not change.
+    let (_std_guard, _file_guard) = library::core::logger::init_log();
+    library::core::db::init_database().await;
+    library::cache::init_cache().await;
 
     server::serve().await;
 }
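Note: `RedisConnManager::default()` assembles the `[redis]` settings from app.toml into a URL of the form `redis://:<password>@<host:port>/<db>` and builds a deadpool pool on the Tokio runtime. The sketch below exercises the pooled helpers added in `library/src/core/redis.rs`; it assumes a reachable Redis instance and the return types as reconstructed above, and the key and field names are purely illustrative.

```rust
use library::core::redis::REDIS_CONN;

// Illustrative smoke test for the new Redis helpers; not part of this change.
async fn redis_smoke_test() {
    // Plain string commands.
    REDIS_CONN.set("greeting", "hello").await.unwrap();
    assert_eq!(REDIS_CONN.get("greeting").await.unwrap(), "hello");

    // Hash commands: these back the ServerCache write-through path (hset / hgetall / hdel).
    REDIS_CONN.hset("demo_hash", "field_a", "1").await.unwrap();
    let entries = REDIS_CONN.hgetall("demo_hash").await.unwrap();
    assert_eq!(entries.len(), 1);
    REDIS_CONN.hdel("demo_hash", "field_a").await.unwrap();

    // Expiry helpers.
    REDIS_CONN.setexp("ephemeral", "v", 30).await.unwrap();
    assert!(REDIS_CONN.ttl("ephemeral").await.unwrap() <= 30);

    // Clean up.
    REDIS_CONN.del("greeting").await.unwrap();
}
```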