Feature add hours as sorting options backend (#3161)
index cf96b5e77f2f322783b2950eafc8905db9adcb4f..98d3952abc1c5d2b96ad9de1eaf8ad73cfd9f663 100644
--- a/crates/db_schema/src/utils.rs
+++ b/crates/db_schema/src/utils.rs
@@ -5,9 +5,9 @@ use crate::{
   CommentSortType,
   SortType,
 };
-use activitypub_federation::{core::object_id::ObjectId, traits::ApubObject};
-use bb8::PooledConnection;
+use activitypub_federation::{fetch::object_id::ObjectId, traits::Object};
 use chrono::NaiveDateTime;
+use deadpool::Runtime;
 use diesel::{
   backend::Backend,
   deserialize::FromSql,
@@ -19,24 +19,26 @@ use diesel::{
 };
 use diesel_async::{
   pg::AsyncPgConnection,
-  pooled_connection::{bb8::Pool, AsyncDieselConnectionManager},
+  pooled_connection::{
+    deadpool::{Object as PooledConnection, Pool},
+    AsyncDieselConnectionManager,
+  },
 };
 use diesel_migrations::EmbeddedMigrations;
 use lemmy_utils::{error::LemmyError, settings::structs::Settings};
 use once_cell::sync::Lazy;
 use regex::Regex;
-use std::{env, env::VarError};
+use std::{env, env::VarError, time::Duration};
 use tracing::info;
 use url::Url;
 
 const FETCH_LIMIT_DEFAULT: i64 = 10;
 pub const FETCH_LIMIT_MAX: i64 = 50;
+const POOL_TIMEOUT: Option<Duration> = Some(Duration::from_secs(5));
 
 pub type DbPool = Pool<AsyncPgConnection>;
 
-pub async fn get_conn(
-  pool: &DbPool,
-) -> Result<PooledConnection<AsyncDieselConnectionManager<AsyncPgConnection>>, DieselError> {
+pub async fn get_conn(pool: &DbPool) -> Result<PooledConnection<AsyncPgConnection>, DieselError> {
   pool.get().await.map_err(|e| QueryBuilderError(e.into()))
 }
 
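With the move from bb8 to deadpool, get_conn now returns deadpool's Object wrapper around AsyncPgConnection, so the signature no longer needs to spell out the manager type. A minimal caller sketch (hypothetical; only get_conn, DbPool, and DieselError come from this file):

    use diesel_async::RunQueryDsl;

    async fn ping_db(pool: &DbPool) -> Result<(), DieselError> {
      // The pooled Object derefs to the underlying AsyncPgConnection,
      // so diesel-async queries can use it directly.
      let conn = &mut get_conn(pool).await?;
      diesel::sql_query("SELECT 1").execute(conn).await?;
      Ok(())
    }
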
@@ -46,7 +48,7 @@ pub fn get_database_url_from_env() -> Result<String, VarError> {
 
 pub fn fuzzy_search(q: &str) -> String {
   let replaced = q.replace('%', "\\%").replace('_', "\\_").replace(' ', "%");
-  format!("%{}%", replaced)
+  format!("%{replaced}%")
 }
 
 pub fn limit_and_offset(
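
fuzzy_search escapes the SQL LIKE wildcards % and _ before turning spaces into %, so user input cannot smuggle wildcards into the pattern. A quick illustration (hypothetical asserts, not tests from this commit):

    // Spaces become `%`, letting each term match anywhere in the column.
    assert_eq!(fuzzy_search("foo bar"), "%foo%bar%");
    // A literal `%` in the query is escaped instead of acting as a wildcard.
    assert_eq!(fuzzy_search("100% rust"), "%100\\%%rust%");
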
@@ -67,7 +69,7 @@ pub fn limit_and_offset(
     Some(limit) => {
       if !(1..=FETCH_LIMIT_MAX).contains(&limit) {
         return Err(QueryBuilderError(
-          format!("Fetch limit is > {}", FETCH_LIMIT_MAX).into(),
+          format!("Fetch limit is > {FETCH_LIMIT_MAX}").into(),
         ));
       } else {
         limit
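
For reference, the check above rejects any requested limit outside 1..=FETCH_LIMIT_MAX. Hypothetical calls, assuming the (page, limit) parameters and the usual offset = limit * (page - 1) computed elsewhere in this function:

    assert!(limit_and_offset(Some(1), Some(0)).is_err());  // below the minimum of 1
    assert!(limit_and_offset(Some(1), Some(51)).is_err()); // above FETCH_LIMIT_MAX (50)
    assert_eq!(limit_and_offset(Some(2), Some(10)).unwrap(), (10, 10)); // (limit, offset)
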
@@ -135,11 +137,13 @@ async fn build_db_pool_settings_opt(settings: Option<&Settings>) -> Result<DbPoo
   let db_url = get_database_url(settings);
   let pool_size = settings.map(|s| s.database.pool_size).unwrap_or(5);
   let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new(&db_url);
-  let pool = Pool::builder()
+  let pool = Pool::builder(manager)
     .max_size(pool_size)
-    .min_idle(Some(1))
-    .build(manager)
-    .await?;
+    .wait_timeout(POOL_TIMEOUT)
+    .create_timeout(POOL_TIMEOUT)
+    .recycle_timeout(POOL_TIMEOUT)
+    .runtime(Runtime::Tokio1)
+    .build()?;
 
  // If there's no settings, that means it's a unit test, and migrations need to be run
   if settings.is_none() {
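
Unlike bb8's builder, deadpool's takes the manager up front and builds synchronously, and it has no min_idle equivalent, so that setting is simply dropped. The Runtime::Tokio1 hint matters: deadpool only enforces the wait, create, and recycle timeouts when it knows which async runtime to schedule them on.
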
@@ -154,11 +158,11 @@ pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
 pub fn run_migrations(db_url: &str) {
   // Needs to be a sync connection
   let mut conn =
-    PgConnection::establish(db_url).unwrap_or_else(|_| panic!("Error connecting to {}", db_url));
+    PgConnection::establish(db_url).unwrap_or_else(|e| panic!("Error connecting to {db_url}: {e}"));
   info!("Running Database migrations (This may take a long time)...");
   let _ = &mut conn
     .run_pending_migrations(MIGRATIONS)
-    .unwrap_or_else(|_| panic!("Couldn't run DB Migrations"));
+    .unwrap_or_else(|e| panic!("Couldn't run DB Migrations: {e}"));
   info!("Database migrations complete.");
 }
 
@@ -178,10 +182,7 @@ pub fn get_database_url(settings: Option<&Settings>) -> String {
     Ok(url) => url,
     Err(e) => match settings {
       Some(settings) => settings.get_database_url(),
-      None => panic!(
-        "Failed to read database URL from env var LEMMY_DATABASE_URL: {}",
-        e
-      ),
+      None => panic!("Failed to read database URL from env var LEMMY_DATABASE_URL: {e}"),
     },
   }
 }
@@ -195,7 +196,10 @@ pub fn post_to_comment_sort_type(sort: SortType) -> CommentSortType {
     SortType::Active | SortType::Hot => CommentSortType::Hot,
     SortType::New | SortType::NewComments | SortType::MostComments => CommentSortType::New,
     SortType::Old => CommentSortType::Old,
-    SortType::TopDay
+    SortType::TopHour
+    | SortType::TopSixHour
+    | SortType::TopTwelveHour
+    | SortType::TopDay
     | SortType::TopAll
     | SortType::TopWeek
     | SortType::TopYear
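
The three new hour-scoped variants join the existing Top* arms, so by construction they map to the same comment sort as TopDay and the other Top* sorts (the arm's body is cut off by this hunk). A hypothetical sanity check, assuming the usual Copy/PartialEq/Debug derives on these enums:

    for sort in [SortType::TopHour, SortType::TopSixHour, SortType::TopTwelveHour] {
      // Every hour-based post sort should behave exactly like the other Top* sorts.
      assert_eq!(post_to_comment_sort_type(sort), post_to_comment_sort_type(SortType::TopDay));
    }
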
@@ -228,19 +232,19 @@ impl<DB: Backend> FromSql<Text, DB> for DbUrl
 where
   String: FromSql<Text, DB>,
 {
-  fn from_sql(value: diesel::backend::RawValue<'_, DB>) -> diesel::deserialize::Result<Self> {
+  fn from_sql(value: DB::RawValue<'_>) -> diesel::deserialize::Result<Self> {
     let str = String::from_sql(value)?;
-    Ok(DbUrl(Url::parse(&str)?))
+    Ok(DbUrl(Box::new(Url::parse(&str)?)))
   }
 }
 
 impl<Kind> From<ObjectId<Kind>> for DbUrl
 where
-  Kind: ApubObject + Send + 'static,
-  for<'de2> <Kind as ApubObject>::ApubType: serde::Deserialize<'de2>,
+  Kind: Object + Send + 'static,
+  for<'de2> <Kind as Object>::Kind: serde::Deserialize<'de2>,
 {
   fn from(id: ObjectId<Kind>) -> Self {
-    DbUrl(id.into())
+    DbUrl(Box::new(id.into()))
   }
 }
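
Boxing the inner Url makes DbUrl a single pointer wide rather than the full size of Url (well over a hundred bytes), which keeps the many row structs embedding DbUrl fields small. A rough illustration of the size difference:

    use std::mem::size_of;
    use url::Url;

    // A Box of a sized type is a thin pointer, so the wrapper stays
    // pointer-sized no matter how large Url itself is.
    assert_eq!(size_of::<Box<Url>>(), size_of::<usize>());
    assert!(size_of::<Url>() > size_of::<Box<Url>>());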