use std::process::Command;

use chrono::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
use rocket::{
    http::Status,
    request::{FromRequest, Outcome},
    Request, State,
};

use crate::{
    error::{Error, MapResult},
    CONFIG,
};

#[cfg(sqlite)]
#[path = "schemas/sqlite/schema.rs"]
pub mod __sqlite_schema;

#[cfg(mysql)]
#[path = "schemas/mysql/schema.rs"]
pub mod __mysql_schema;

#[cfg(postgresql)]
#[path = "schemas/postgresql/schema.rs"]
pub mod __postgresql_schema;

// This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported
macro_rules! generate_connections {
    ( $( $name:ident: $ty:ty ),+ ) => {
        #[allow(non_camel_case_types, dead_code)]
        #[derive(Eq, PartialEq)]
        pub enum DbConnType { $( $name, )+ }

        #[allow(non_camel_case_types)]
        pub enum DbConn { $( #[cfg($name)] $name(PooledConnection<ConnectionManager<$ty>>), )+ }

        #[allow(non_camel_case_types)]
        pub enum DbPool { $( #[cfg($name)] $name(Pool<ConnectionManager<$ty>>), )+ }

        impl DbPool {
            // For the given database URL, guess its type, run the migrations, create a pool and return it
            pub fn from_config() -> Result<Self, Error> {
                let url = CONFIG.database_url();
                let conn_type = DbConnType::from_url(&url)?;

                match conn_type { $(
                    DbConnType::$name => {
                        #[cfg($name)]
                        {
                            paste::paste! { [< $name _migrations >]::run_migrations()?; }

                            let manager = ConnectionManager::new(&url);
                            let pool = Pool::builder().build(manager).map_res("Failed to create pool")?;
                            return Ok(Self::$name(pool));
                        }
                        #[cfg(not($name))]
                        #[allow(unreachable_code)]
                        return unreachable!("Trying to use a DB backend when its feature is disabled");
                    },
                )+ }
            }

            // Get a connection from the pool
            pub fn get(&self) -> Result<DbConn, Error> {
                match self { $(
                    #[cfg($name)]
                    Self::$name(p) => Ok(DbConn::$name(p.get().map_res("Error retrieving connection from pool")?)),
                )+ }
            }
        }
    };
}

generate_connections! {
    sqlite: diesel::sqlite::SqliteConnection,
    mysql: diesel::mysql::MysqlConnection,
    postgresql: diesel::pg::PgConnection
}
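
// Illustrative usage of the generated types (a hedged sketch, not part of this module): at startup
// a pool is built from the configured URL and handed to Rocket, and individual connections are
// checked out from it later. The `rocket::ignite().manage(...)` wiring shown here is an assumption
// about the surrounding application code.
//
//     let pool = DbPool::from_config().expect("DB pool"); // guesses the backend, runs migrations
//     let conn: DbConn = pool.get()?;                     // checks one connection out of the pool
//     rocket::ignite().manage(pool);                      // makes the pool available to request guards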

impl DbConnType {
    pub fn from_url(url: &str) -> Result<DbConnType, Error> {
        // Mysql
        if url.starts_with("mysql:") {
            #[cfg(mysql)]
            return Ok(DbConnType::mysql);

            #[cfg(not(mysql))]
            err!("`DATABASE_URL` is a MySQL URL, but the 'mysql' feature is not enabled")

        // Postgres
        } else if url.starts_with("postgresql:") || url.starts_with("postgres:") {
            #[cfg(postgresql)]
            return Ok(DbConnType::postgresql);

            #[cfg(not(postgresql))]
            err!("`DATABASE_URL` is a PostgreSQL URL, but the 'postgresql' feature is not enabled")

        // Sqlite
        } else {
            #[cfg(sqlite)]
            return Ok(DbConnType::sqlite);

            #[cfg(not(sqlite))]
            err!("`DATABASE_URL` looks like a SQLite URL, but the 'sqlite' feature is not enabled")
        }
    }
}
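
// For illustration only (hypothetical values, not read from any config):
//   "mysql://user:pass@localhost/bitwarden"      -> DbConnType::mysql
//   "postgresql://user:pass@localhost/bitwarden" -> DbConnType::postgresql
//   "data/db.sqlite3"                            -> DbConnType::sqlite (any other URL falls through here)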

#[macro_export]
macro_rules! db_run {
    // Same for all dbs
    ( $conn:ident: $body:block ) => {
        db_run! { $conn: sqlite, mysql, postgresql $body }
    };

    // Different code for each db
    ( $conn:ident: $( $( $db:ident ),+ $body:block )+ ) => {
        #[allow(unused)] use diesel::prelude::*;
        match $conn {
            $( $(
                #[cfg($db)]
                crate::db::DbConn::$db(ref $conn) => {
                    paste::paste! {
                        #[allow(unused)] use crate::db::[< __ $db _schema >]::{self as schema, *};
                        #[allow(unused)] use [< __ $db _model >]::*;
                        #[allow(unused)] use crate::db::FromDb;
                    }
                    $body
                },
            )+ )+
        }
    };
}
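
// Illustrative invocation (a sketch assuming a `users` table in the schema modules and a `UserDb`
// model generated by db_object!; both names are assumptions for the example):
//
//     db_run! { conn: {
//         users::table
//             .filter(users::uuid.eq(uuid))
//             .first::<UserDb>(conn)
//             .ok()
//             .from_db()
//     }}
//
// The first arm expands to the same body for every enabled backend; the second arm allows
// backend-specific code, e.g. `db_run! { conn: sqlite, mysql { ... } postgresql { ... } }`.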

pub trait FromDb {
    type Output;
    #[allow(clippy::wrong_self_convention)]
    fn from_db(self) -> Self::Output;
}

// For each struct, e.g. Cipher, we create a CipherDb inside a module named __$db_model
// (where $db is sqlite, mysql or postgresql), to implement the Diesel traits. We also
// provide methods to convert between them and the basic structs. Later, that module
// will be auto imported when using db_run!
#[macro_export]
macro_rules! db_object {
    ( $(
        $( #[$attr:meta] )*
        pub struct $name:ident {
            $( $( #[$field_attr:meta] )* $vis:vis $field:ident : $typ:ty ),+
            $(,)?
        }
    )+ ) => {
        // Create the normal struct, without attributes
        $( pub struct $name { $( /* $( #[$field_attr] )* */ $vis $field : $typ, )+ } )+

        #[cfg(sqlite)]
        pub mod __sqlite_model     { $( db_object! { @db sqlite     | $( #[$attr] )* | $name | $( $( #[$field_attr] )* $field : $typ ),+ } )+ }
        #[cfg(mysql)]
        pub mod __mysql_model      { $( db_object! { @db mysql      | $( #[$attr] )* | $name | $( $( #[$field_attr] )* $field : $typ ),+ } )+ }
        #[cfg(postgresql)]
        pub mod __postgresql_model { $( db_object! { @db postgresql | $( #[$attr] )* | $name | $( $( #[$field_attr] )* $field : $typ ),+ } )+ }
    };

    ( @db $db:ident | $( #[$attr:meta] )* | $name:ident | $( $( #[$field_attr:meta] )* $vis:vis $field:ident : $typ:ty ),+ ) => {
        paste::paste! {
            #[allow(unused)] use super::*;
            #[allow(unused)] use diesel::prelude::*;
            #[allow(unused)] use crate::db::[< __ $db _schema >]::*;

            $( #[$attr] )*
            pub struct [< $name Db >] { $(
                $( #[$field_attr] )* $vis $field : $typ,
            )+ }

            impl [< $name Db >] {
                #[allow(clippy::wrong_self_convention)]
                #[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } }
            }

            impl crate::db::FromDb for [< $name Db >] {
                type Output = super::$name;
                #[inline(always)] fn from_db(self) -> Self::Output { super::$name { $( $field: self.$field, )+ } }
            }

            impl crate::db::FromDb for Vec<[< $name Db >]> {
                type Output = Vec<super::$name>;
                #[inline(always)] fn from_db(self) -> Self::Output { self.into_iter().map(crate::db::FromDb::from_db).collect() }
            }

            impl crate::db::FromDb for Option<[< $name Db >]> {
                type Output = Option<super::$name>;
                #[inline(always)] fn from_db(self) -> Self::Output { self.map(crate::db::FromDb::from_db) }
            }
        }
    };
}
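
// Illustrative invocation (a hedged sketch; the `users` table name and the field set are
// assumptions made for the example, not defined in this module):
//
//     db_object! {
//         #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
//         #[table_name = "users"]
//         #[primary_key(uuid)]
//         pub struct User {
//             pub uuid: String,
//             pub email: String,
//         }
//     }
//
// This generates the plain `User` struct plus per-backend `UserDb` structs carrying the Diesel
// attributes, together with `UserDb::to_db()` and the `FromDb::from_db()` conversions.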

// Reexport the models; this needs to come after the macros are defined so the models can use them
pub mod models;

/// Creates a back-up of the database using sqlite3
pub fn backup_database() -> Result<(), Error> {
    use std::path::Path;
    let db_url = CONFIG.database_url();
    let db_path = Path::new(&db_url).parent().unwrap();

    let now: DateTime<Utc> = Utc::now();
    let file_date = now.format("%Y%m%d").to_string();
    let backup_command: String = format!("{}{}{}", ".backup 'db_", file_date, ".sqlite3'");

    Command::new("sqlite3")
        .current_dir(db_path)
        .args(&["db.sqlite3", &backup_command])
        .output()
        .expect("Can't open database, sqlite3 is not available, make sure it's installed and available on the PATH");

    Ok(())
}
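
// For reference, the invocation above is roughly equivalent to running (date shown is hypothetical):
//
//     cd <database dir> && sqlite3 db.sqlite3 ".backup 'db_20201003.sqlite3'"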

/// Attempts to retrieve a single connection from the managed database pool. If
/// no pool is currently managed, fails with an `InternalServerError` status. If
/// no connections are available, fails with a `ServiceUnavailable` status.
impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> Outcome<DbConn, ()> {
        // https://github.com/SergioBenitez/Rocket/commit/e3c1a4ad3ab9b840482ec6de4200d30df43e357c
        let pool = try_outcome!(request.guard::<State<DbPool>>());
        match pool.get() {
            Ok(conn) => Outcome::Success(conn),
            Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())),
        }
    }
}
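
// With this request guard in place, a Rocket handler can simply take a `DbConn` parameter and a
// pooled connection is injected per request (hypothetical route, for illustration only):
//
//     #[get("/alive")]
//     fn alive(_conn: DbConn) -> &'static str {
//         "ok"
//     }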

// Embed the migrations from the migrations folder into the application
// This way, the program automatically migrates the database to the latest version
// https://docs.rs/diesel_migrations/*/diesel_migrations/macro.embed_migrations.html
#[cfg(sqlite)]
mod sqlite_migrations {
    #[allow(unused_imports)]
    embed_migrations!("migrations/sqlite");

    pub fn run_migrations() -> Result<(), super::Error> {
        // Make sure the directory exists
        let url = crate::CONFIG.database_url();
        let path = std::path::Path::new(&url);

        if let Some(parent) = path.parent() {
            if std::fs::create_dir_all(parent).is_err() {
                error!("Error creating database directory");
                std::process::exit(1);
            }
        }

        use diesel::{Connection, RunQueryDsl};
        // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
        let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;

        // Disable Foreign Key Checks during migration
        // Scoped to a connection.
        diesel::sql_query("PRAGMA foreign_keys = OFF")
            .execute(&connection)
            .expect("Failed to disable Foreign Key Checks during migrations");

        // Turn on WAL in SQLite
        if crate::CONFIG.enable_db_wal() {
            diesel::sql_query("PRAGMA journal_mode=wal")
                .execute(&connection)
                .expect("Failed to turn on WAL");
        }

        embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;
        Ok(())
    }
}

#[cfg(mysql)]
mod mysql_migrations {
    #[allow(unused_imports)]
    embed_migrations!("migrations/mysql");

    pub fn run_migrations() -> Result<(), super::Error> {
        use diesel::{Connection, RunQueryDsl};
        // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
        let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;

        // Disable Foreign Key Checks during migration
        // Scoped to a connection/session.
        diesel::sql_query("SET FOREIGN_KEY_CHECKS = 0")
            .execute(&connection)
            .expect("Failed to disable Foreign Key Checks during migrations");

        embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;
        Ok(())
    }
}

#[cfg(postgresql)]
mod postgresql_migrations {
    #[allow(unused_imports)]
    embed_migrations!("migrations/postgresql");

    pub fn run_migrations() -> Result<(), super::Error> {
        use diesel::{Connection, RunQueryDsl};
        // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
        let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;

        // Disable Foreign Key Checks during migration
        // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html,
        // "SET CONSTRAINTS sets the behavior of constraint checking within the
        // current transaction", so this setting probably won't take effect for
        // any of the migrations since it's being run outside of a transaction.
        // Migrations that need to disable foreign key checks should run this
        // from within the migration script itself.
        diesel::sql_query("SET CONSTRAINTS ALL DEFERRED")
            .execute(&connection)
            .expect("Failed to disable Foreign Key Checks during migrations");

        embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?;
        Ok(())
    }
}