Update to diesel2

Daniel García 2022-05-20 23:39:47 +02:00
parent b878495d64
commit 8409b31d6b
36 changed files with 1267 additions and 1270 deletions
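
Most of the source changes in this commit follow a single pattern: diesel 2 executes queries against an exclusive `&mut` borrow of the connection, so handlers that used to take `conn: DbConn` and pass `&conn` now take `mut conn: DbConn` and pass `&mut conn`. A minimal sketch of that change, using a hypothetical `users` table rather than anything from this repository:

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

diesel::table! {
    users (uuid) {
        uuid -> Text,
        email -> Text,
    }
}

#[derive(Queryable)]
struct User {
    uuid: String,
    email: String,
}

// diesel 1.4 signature: fn find_by_uuid(target: &str, conn: &SqliteConnection) -> QueryResult<User>
// diesel 2.0: the connection is borrowed mutably for the duration of the query.
fn find_by_uuid(target: &str, conn: &mut SqliteConnection) -> diesel::QueryResult<User> {
    users::table.filter(users::uuid.eq(target)).first::<User>(conn)
}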

Cargo.lock (generated)

@ -122,9 +122,9 @@ dependencies = [
[[package]] [[package]]
name = "async-trait" name = "async-trait"
version = "0.1.57" version = "0.1.58"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f" checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -228,9 +228,9 @@ dependencies = [
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.11.0" version = "3.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d" checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
[[package]] [[package]]
name = "byteorder" name = "byteorder"
@ -300,20 +300,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1"
dependencies = [ dependencies = [
"iana-time-zone", "iana-time-zone",
"js-sys",
"num-integer", "num-integer",
"num-traits", "num-traits",
"serde", "serde",
"time 0.1.43",
"wasm-bindgen",
"winapi", "winapi",
] ]
[[package]] [[package]]
name = "chrono-tz" name = "chrono-tz"
version = "0.6.3" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29c39203181991a7dd4343b8005bd804e7a9a37afb8ac070e43771e8c820bbde" checksum = "bbc529705a6e0028189c83f0a5dd9fb214105116f7e3c0eeab7ff0369766b0d1"
dependencies = [ dependencies = [
"chrono", "chrono",
"chrono-tz-build", "chrono-tz-build",
@ -322,9 +319,9 @@ dependencies = [
[[package]] [[package]]
name = "chrono-tz-build" name = "chrono-tz-build"
version = "0.0.3" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f509c3a87b33437b05e2458750a0700e5bdd6956176773e6c7d6dd15a283a0c" checksum = "d9998fb9f7e9b2111641485bf8beb32f92945f97f92a3d061f744cfef335f751"
dependencies = [ dependencies = [
"parse-zoneinfo", "parse-zoneinfo",
"phf", "phf",
@ -361,11 +358,11 @@ dependencies = [
"base64", "base64",
"hkdf", "hkdf",
"hmac", "hmac",
"percent-encoding 2.2.0", "percent-encoding",
"rand", "rand",
"sha2", "sha2",
"subtle", "subtle",
"time 0.3.15", "time",
"version_check", "version_check",
] ]
@ -381,8 +378,8 @@ dependencies = [
"publicsuffix", "publicsuffix",
"serde", "serde",
"serde_json", "serde_json",
"time 0.3.15", "time",
"url 2.3.1", "url",
] ]
[[package]] [[package]]
@ -397,8 +394,8 @@ dependencies = [
"publicsuffix", "publicsuffix",
"serde", "serde",
"serde_json", "serde_json",
"time 0.3.15", "time",
"url 2.3.1", "url",
] ]
[[package]] [[package]]
@ -487,9 +484,9 @@ dependencies = [
[[package]] [[package]]
name = "cxx" name = "cxx"
version = "1.0.78" version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19f39818dcfc97d45b03953c1292efc4e80954e1583c4aa770bac1383e2310a4" checksum = "3f83d0ebf42c6eafb8d7c52f7e5f2d3003b89c7aa4fd2b79229209459a849af8"
dependencies = [ dependencies = [
"cc", "cc",
"cxxbridge-flags", "cxxbridge-flags",
@ -499,9 +496,9 @@ dependencies = [
[[package]] [[package]]
name = "cxx-build" name = "cxx-build"
version = "1.0.78" version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e580d70777c116df50c390d1211993f62d40302881e54d4b79727acb83d0199" checksum = "07d050484b55975889284352b0ffc2ecbda25c0c55978017c132b29ba0818a86"
dependencies = [ dependencies = [
"cc", "cc",
"codespan-reporting", "codespan-reporting",
@ -514,15 +511,15 @@ dependencies = [
[[package]] [[package]]
name = "cxxbridge-flags" name = "cxxbridge-flags"
version = "1.0.78" version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56a46460b88d1cec95112c8c363f0e2c39afdb237f60583b0b36343bf627ea9c" checksum = "99d2199b00553eda8012dfec8d3b1c75fce747cf27c169a270b3b99e3448ab78"
[[package]] [[package]]
name = "cxxbridge-macro" name = "cxxbridge-macro"
version = "1.0.78" version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "747b608fecf06b0d72d440f27acc99288207324b793be2c17991839f3d4995ea" checksum = "dcb67a6de1f602736dd7eaead0080cf3435df806c61b24b13328db128c58868f"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -624,27 +621,30 @@ dependencies = [
[[package]] [[package]]
name = "diesel" name = "diesel"
version = "1.4.8" version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b28135ecf6b7d446b43e27e225622a038cc4e2930a1022f51cdb97ada19b8e4d" checksum = "68c186a7418a2aac330bb76cde82f16c36b03a66fb91db32d20214311f9f6545"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"byteorder", "byteorder",
"chrono", "chrono",
"diesel_derives", "diesel_derives",
"itoa",
"libsqlite3-sys", "libsqlite3-sys",
"mysqlclient-sys", "mysqlclient-sys",
"percent-encoding",
"pq-sys", "pq-sys",
"r2d2", "r2d2",
"url 1.7.2", "url",
] ]
[[package]] [[package]]
name = "diesel_derives" name = "diesel_derives"
version = "1.4.1" version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" checksum = "143b758c91dbc3fe1fdcb0dba5bd13276c6a66422f2ef5795b58488248a310aa"
dependencies = [ dependencies = [
"proc-macro-error",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn", "syn",
@ -652,10 +652,11 @@ dependencies = [
[[package]] [[package]]
name = "diesel_migrations" name = "diesel_migrations"
version = "1.4.0" version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf3cde8413353dc7f5d72fa8ce0b99a560a359d2c5ef1e5817ca731cd9008f4c" checksum = "e9ae22beef5e9d6fab9225ddb073c1c6c1a7a6ded5019d5da11d1e5c5adc34e2"
dependencies = [ dependencies = [
"diesel",
"migrations_internals", "migrations_internals",
"migrations_macros", "migrations_macros",
] ]
@ -671,34 +672,11 @@ dependencies = [
"subtle", "subtle",
] ]
[[package]]
name = "dirs"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-sys"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
dependencies = [
"libc",
"redox_users",
"winapi",
]
[[package]] [[package]]
name = "dotenvy" name = "dotenvy"
version = "0.15.5" version = "0.15.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9155c8f4dc55c7470ae9da3f63c6785245093b3f6aeb0f5bf2e968efbba314" checksum = "03d8c417d7a8cb362e0c37e5d815f5eb7c37f79ff93707329d5a194e42e54ca0"
dependencies = [
"dirs",
]
[[package]] [[package]]
name = "either" name = "either"
@ -822,14 +800,14 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
dependencies = [ dependencies = [
"percent-encoding 2.2.0", "percent-encoding",
] ]
[[package]] [[package]]
name = "futures" name = "futures"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c" checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@ -842,9 +820,9 @@ dependencies = [
[[package]] [[package]]
name = "futures-channel" name = "futures-channel"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050" checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-sink", "futures-sink",
@ -852,15 +830,15 @@ dependencies = [
[[package]] [[package]]
name = "futures-core" name = "futures-core"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf" checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac"
[[package]] [[package]]
name = "futures-executor" name = "futures-executor"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab" checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-task", "futures-task",
@ -869,15 +847,15 @@ dependencies = [
[[package]] [[package]]
name = "futures-io" name = "futures-io"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68" checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb"
[[package]] [[package]]
name = "futures-macro" name = "futures-macro"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17" checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -886,15 +864,15 @@ dependencies = [
[[package]] [[package]]
name = "futures-sink" name = "futures-sink"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56" checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9"
[[package]] [[package]]
name = "futures-task" name = "futures-task"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1" checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
[[package]] [[package]]
name = "futures-timer" name = "futures-timer"
@ -904,9 +882,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c"
[[package]] [[package]]
name = "futures-util" name = "futures-util"
version = "0.3.24" version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90" checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@ -1180,9 +1158,9 @@ dependencies = [
[[package]] [[package]]
name = "iana-time-zone-haiku" name = "iana-time-zone-haiku"
version = "0.1.0" version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fde6edd6cef363e9359ed3c98ba64590ba9eecba2293eb5a723ab32aee8926aa" checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
dependencies = [ dependencies = [
"cxx", "cxx",
"cxx-build", "cxx-build",
@ -1194,17 +1172,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e"
dependencies = [
"matches",
"unicode-bidi",
"unicode-normalization",
]
[[package]] [[package]]
name = "idna" name = "idna"
version = "0.2.3" version = "0.2.3"
@ -1376,9 +1343,9 @@ dependencies = [
[[package]] [[package]]
name = "libsqlite3-sys" name = "libsqlite3-sys"
version = "0.22.2" version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290b64917f8b0cb885d9de0f9959fe1f775d7fa12f1da2db9001c1c8ab60f89d" checksum = "9f0455f2c1bc9a7caa792907026e469c1d91761fb0ea37cbb16427c77280cf35"
dependencies = [ dependencies = [
"cc", "cc",
"pkg-config", "pkg-config",
@ -1481,23 +1448,23 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]] [[package]]
name = "migrations_internals" name = "migrations_internals"
version = "1.4.1" version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b4fc84e4af020b837029e017966f86a1c2d5e83e64b589963d5047525995860" checksum = "c493c09323068c01e54c685f7da41a9ccf9219735c3766fbfd6099806ea08fbc"
dependencies = [ dependencies = [
"diesel", "serde",
"toml",
] ]
[[package]] [[package]]
name = "migrations_macros" name = "migrations_macros"
version = "1.4.2" version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c" checksum = "8a8ff27a350511de30cdabb77147501c36ef02e0451d957abea2f30caffb2b58"
dependencies = [ dependencies = [
"migrations_internals", "migrations_internals",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn",
] ]
[[package]] [[package]]
@ -1539,7 +1506,7 @@ dependencies = [
"libc", "libc",
"log", "log",
"wasi 0.11.0+wasi-snapshot-preview1", "wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys", "windows-sys 0.36.1",
] ]
[[package]] [[package]]
@ -1787,15 +1754,15 @@ dependencies = [
[[package]] [[package]]
name = "parking_lot_core" name = "parking_lot_core"
version = "0.9.3" version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall", "redox_syscall",
"smallvec", "smallvec",
"windows-sys", "windows-sys 0.42.0",
] ]
[[package]] [[package]]
@ -1845,12 +1812,6 @@ dependencies = [
"base64", "base64",
] ]
[[package]]
name = "percent-encoding"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
[[package]] [[package]]
name = "percent-encoding" name = "percent-encoding"
version = "2.2.0" version = "2.2.0"
@ -1937,7 +1898,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676" checksum = "e1fb5f6f826b772a8d4c0394209441e7d37cbbb967ae9c7e0e8134365c9ee676"
dependencies = [ dependencies = [
"siphasher", "siphasher",
"uncased",
] ]
[[package]] [[package]]
@ -1991,6 +1951,30 @@ dependencies = [
"vcpkg", "vcpkg",
] ]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]] [[package]]
name = "proc-macro-hack" name = "proc-macro-hack"
version = "0.5.19" version = "0.5.19"
@ -1999,9 +1983,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.46" version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b" checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
@ -2131,17 +2115,6 @@ dependencies = [
"bitflags", "bitflags",
] ]
[[package]]
name = "redox_users"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
dependencies = [
"getrandom",
"redox_syscall",
"thiserror",
]
[[package]] [[package]]
name = "ref-cast" name = "ref-cast"
version = "1.0.12" version = "1.0.12"
@ -2222,7 +2195,7 @@ dependencies = [
"mime", "mime",
"native-tls", "native-tls",
"once_cell", "once_cell",
"percent-encoding 2.2.0", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"proc-macro-hack", "proc-macro-hack",
"serde", "serde",
@ -2234,7 +2207,7 @@ dependencies = [
"tokio-util", "tokio-util",
"tower-service", "tower-service",
"trust-dns-resolver", "trust-dns-resolver",
"url 2.3.1", "url",
"wasm-bindgen", "wasm-bindgen",
"wasm-bindgen-futures", "wasm-bindgen-futures",
"web-sys", "web-sys",
@ -2317,7 +2290,7 @@ dependencies = [
"serde_json", "serde_json",
"state", "state",
"tempfile", "tempfile",
"time 0.3.15", "time",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
"tokio-util", "tokio-util",
@ -2357,7 +2330,7 @@ dependencies = [
"log", "log",
"memchr", "memchr",
"pear", "pear",
"percent-encoding 2.2.0", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"ref-cast", "ref-cast",
"rustls", "rustls",
@ -2366,7 +2339,7 @@ dependencies = [
"smallvec", "smallvec",
"stable-pattern", "stable-pattern",
"state", "state",
"time 0.3.15", "time",
"tokio", "tokio",
"tokio-rustls", "tokio-rustls",
"uncased", "uncased",
@ -2380,9 +2353,9 @@ checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.20.6" version = "0.20.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c"
dependencies = [ dependencies = [
"log", "log",
"ring", "ring",
@ -2427,7 +2400,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2"
dependencies = [ dependencies = [
"lazy_static", "lazy_static",
"windows-sys", "windows-sys 0.36.1",
] ]
[[package]] [[package]]
@ -2522,9 +2495,9 @@ dependencies = [
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.86" version = "1.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45"
dependencies = [ dependencies = [
"itoa", "itoa",
"ryu", "ryu",
@ -2603,7 +2576,7 @@ dependencies = [
"num-bigint", "num-bigint",
"num-traits", "num-traits",
"thiserror", "thiserror",
"time 0.3.15", "time",
] ]
[[package]] [[package]]
@ -2700,7 +2673,7 @@ dependencies = [
"hostname", "hostname",
"libc", "libc",
"log", "log",
"time 0.3.15", "time",
] ]
[[package]] [[package]]
@ -2764,16 +2737,6 @@ dependencies = [
"num_cpus", "num_cpus",
] ]
[[package]]
name = "time"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
dependencies = [
"libc",
"winapi",
]
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.15" version = "0.3.15"
@ -3020,7 +2983,7 @@ dependencies = [
"thiserror", "thiserror",
"tinyvec", "tinyvec",
"tokio", "tokio",
"url 2.3.1", "url",
] ]
[[package]] [[package]]
@ -3064,7 +3027,7 @@ dependencies = [
"rand", "rand",
"sha-1", "sha-1",
"thiserror", "thiserror",
"url 2.3.1", "url",
"utf-8", "utf-8",
] ]
@ -3148,17 +3111,6 @@ version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "url"
version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a"
dependencies = [
"idna 0.1.5",
"matches",
"percent-encoding 1.0.1",
]
[[package]] [[package]]
name = "url" name = "url"
version = "2.3.1" version = "2.3.1"
@ -3167,7 +3119,7 @@ checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
dependencies = [ dependencies = [
"form_urlencoded", "form_urlencoded",
"idna 0.3.0", "idna 0.3.0",
"percent-encoding 2.2.0", "percent-encoding",
"serde", "serde",
] ]
@ -3226,7 +3178,7 @@ dependencies = [
"once_cell", "once_cell",
"openssl", "openssl",
"paste", "paste",
"percent-encoding 2.2.0", "percent-encoding",
"pico-args", "pico-args",
"rand", "rand",
"regex", "regex",
@ -3237,12 +3189,12 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"syslog", "syslog",
"time 0.3.15", "time",
"tokio", "tokio",
"tokio-tungstenite", "tokio-tungstenite",
"totp-lite", "totp-lite",
"tracing", "tracing",
"url 2.3.1", "url",
"uuid", "uuid",
"webauthn-rs", "webauthn-rs",
"yubico", "yubico",
@ -3385,7 +3337,7 @@ dependencies = [
"serde_json", "serde_json",
"thiserror", "thiserror",
"tracing", "tracing",
"url 2.3.1", "url",
] ]
[[package]] [[package]]
@ -3461,6 +3413,27 @@ dependencies = [
"windows_x86_64_msvc 0.36.1", "windows_x86_64_msvc 0.36.1",
] ]
[[package]]
name = "windows-sys"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc 0.42.0",
"windows_i686_gnu 0.42.0",
"windows_i686_msvc 0.42.0",
"windows_x86_64_gnu 0.42.0",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc 0.42.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.32.0" version = "0.32.0"
@ -3473,6 +3446,12 @@ version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.32.0" version = "0.32.0"
@ -3485,6 +3464,12 @@ version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
[[package]]
name = "windows_i686_gnu"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.32.0" version = "0.32.0"
@ -3497,6 +3482,12 @@ version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
[[package]]
name = "windows_i686_msvc"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.32.0" version = "0.32.0"
@ -3509,6 +3500,18 @@ version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.32.0" version = "0.32.0"
@ -3521,6 +3524,12 @@ version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
[[package]] [[package]]
name = "winreg" name = "winreg"
version = "0.7.0" version = "0.7.0"

Cargo.toml

@ -42,7 +42,7 @@ tracing = { version = "0.1.37", features = ["log"] } # Needed to have lettre and
backtrace = "0.3.66" # Logging panics to logfile instead stderr only backtrace = "0.3.66" # Logging panics to logfile instead stderr only
# A `dotenv` implementation for Rust # A `dotenv` implementation for Rust
dotenvy = { version = "0.15.5", default-features = false } dotenvy = { version = "0.15.6", default-features = false }
# Lazy initialization # Lazy initialization
once_cell = "1.15.0" once_cell = "1.15.0"
@ -60,19 +60,19 @@ rmpv = "1.0.0" # MessagePack library
dashmap = "5.4.0" dashmap = "5.4.0"
# Async futures # Async futures
futures = "0.3.24" futures = "0.3.25"
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time"] } tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time"] }
# A generic serialization/deserialization framework # A generic serialization/deserialization framework
serde = { version = "1.0.145", features = ["derive"] } serde = { version = "1.0.145", features = ["derive"] }
serde_json = "1.0.86" serde_json = "1.0.87"
# A safe, extensible ORM and Query builder # A safe, extensible ORM and Query builder
diesel = { version = "1.4.8", features = ["chrono", "r2d2"] } diesel = { version = "2.0.2", features = ["chrono", "r2d2"] }
diesel_migrations = "1.4.0" diesel_migrations = "2.0.0"
# Bundled SQLite # Bundled SQLite
libsqlite3-sys = { version = "0.22.2", features = ["bundled"], optional = true } libsqlite3-sys = { version = "0.25.1", features = ["bundled"], optional = true }
# Crypto-related libraries # Crypto-related libraries
rand = { version = "0.8.5", features = ["small_rng"] } rand = { version = "0.8.5", features = ["small_rng"] }
@ -83,7 +83,7 @@ uuid = { version = "1.2.1", features = ["v4"] }
# Date and time libraries # Date and time libraries
chrono = { version = "0.4.22", features = ["clock", "serde"], default-features = false } chrono = { version = "0.4.22", features = ["clock", "serde"], default-features = false }
chrono-tz = "0.6.3" chrono-tz = "0.7.0"
time = "0.3.15" time = "0.3.15"
# Job scheduler # Job scheduler
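
Cargo.toml keeps the `r2d2` feature, so pooling still goes through diesel's re-exported r2d2 types; what changes is the borrow at the call site. A small self-contained sketch of a diesel 2 pool (the in-memory SQLite URL and demo table are placeholders, not this project's configuration):

use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};
use diesel::sqlite::SqliteConnection;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // An in-memory SQLite database keeps the example self-contained.
    let manager = ConnectionManager::<SqliteConnection>::new(":memory:");
    let pool = Pool::builder().max_size(4).build(manager)?;

    // r2d2's PooledConnection derefs to the inner connection; diesel 2 wants &mut.
    let mut conn = pool.get()?;
    diesel::sql_query("CREATE TABLE demo (id INTEGER PRIMARY KEY)").execute(&mut *conn)?;
    Ok(())
}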

src/api/admin.rs

@ -25,8 +25,6 @@ use crate::{
CONFIG, VERSION, CONFIG, VERSION,
}; };
use futures::{stream, stream::StreamExt};
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() { if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() {
return routes![admin_disabled]; return routes![admin_disabled];
@ -269,7 +267,7 @@ struct InviteData {
email: String, email: String,
} }
async fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult<User> { async fn get_user_or_404(uuid: &str, conn: &mut DbConn) -> ApiResult<User> {
if let Some(user) = User::find_by_uuid(uuid, conn).await { if let Some(user) = User::find_by_uuid(uuid, conn).await {
Ok(user) Ok(user)
} else { } else {
@ -278,16 +276,16 @@ async fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult<User> {
} }
#[post("/invite", data = "<data>")] #[post("/invite", data = "<data>")]
async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult { async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbConn) -> JsonResult {
let data: InviteData = data.into_inner(); let data: InviteData = data.into_inner();
let email = data.email.clone(); let email = data.email.clone();
if User::find_by_mail(&data.email, &conn).await.is_some() { if User::find_by_mail(&data.email, &mut conn).await.is_some() {
err_code!("User already exists", Status::Conflict.code) err_code!("User already exists", Status::Conflict.code)
} }
let mut user = User::new(email); let mut user = User::new(email);
async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult { async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await
} else { } else {
@ -296,10 +294,10 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -
} }
} }
_generate_invite(&user, &conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?; _generate_invite(&user, &mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
user.save(&conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?; user.save(&mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
Ok(Json(user.to_json(&conn).await)) Ok(Json(user.to_json(&mut conn).await))
} }
#[post("/test/smtp", data = "<data>")] #[post("/test/smtp", data = "<data>")]
@ -320,93 +318,87 @@ fn logout(cookies: &CookieJar<'_>, referer: Referer) -> Redirect {
} }
#[get("/users")] #[get("/users")]
async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> { async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
let users_json = stream::iter(User::get_all(&conn).await) let mut users_json = Vec::new();
.then(|u| async { for u in User::get_all(&mut conn).await {
let u = u; // Move out this single variable let mut usr = u.to_json(&mut conn).await;
let mut usr = u.to_json(&conn).await; usr["UserEnabled"] = json!(u.enabled);
usr["UserEnabled"] = json!(u.enabled); usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); users_json.push(usr);
usr }
})
.collect::<Vec<Value>>()
.await;
Json(Value::Array(users_json)) Json(Value::Array(users_json))
} }
#[get("/users/overview")] #[get("/users/overview")]
async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> { async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
let users_json = stream::iter(User::get_all(&conn).await) let mut users_json = Vec::new();
.then(|u| async { for u in User::get_all(&mut conn).await {
let u = u; // Move out this single variable let mut usr = u.to_json(&mut conn).await;
let mut usr = u.to_json(&conn).await; usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await);
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn).await); usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await);
usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn).await); usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &mut conn).await as i32));
usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn).await as i32)); usr["user_enabled"] = json!(u.enabled);
usr["user_enabled"] = json!(u.enabled); usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); usr["last_active"] = match u.last_active(&mut conn).await {
usr["last_active"] = match u.last_active(&conn).await { Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), None => json!("Never"),
None => json!("Never"), };
}; users_json.push(usr);
usr }
})
.collect::<Vec<Value>>()
.await;
let text = AdminTemplateData::with_data("admin/users", json!(users_json)).render()?; let text = AdminTemplateData::with_data("admin/users", json!(users_json)).render()?;
Ok(Html(text)) Ok(Html(text))
} }
#[get("/users/<uuid>")] #[get("/users/<uuid>")]
async fn get_user_json(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult { async fn get_user_json(uuid: String, _token: AdminToken, mut conn: DbConn) -> JsonResult {
let u = get_user_or_404(&uuid, &conn).await?; let u = get_user_or_404(&uuid, &mut conn).await?;
let mut usr = u.to_json(&conn).await; let mut usr = u.to_json(&mut conn).await;
usr["UserEnabled"] = json!(u.enabled); usr["UserEnabled"] = json!(u.enabled);
usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
Ok(Json(usr)) Ok(Json(usr))
} }
#[post("/users/<uuid>/delete")] #[post("/users/<uuid>/delete")]
async fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn delete_user(uuid: String, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let user = get_user_or_404(&uuid, &conn).await?; let user = get_user_or_404(&uuid, &mut conn).await?;
user.delete(&conn).await user.delete(&mut conn).await
} }
#[post("/users/<uuid>/deauth")] #[post("/users/<uuid>/deauth")]
async fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn deauth_user(uuid: String, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&uuid, &conn).await?; let mut user = get_user_or_404(&uuid, &mut conn).await?;
Device::delete_all_by_user(&user.uuid, &conn).await?; Device::delete_all_by_user(&user.uuid, &mut conn).await?;
user.reset_security_stamp(); user.reset_security_stamp();
user.save(&conn).await user.save(&mut conn).await
} }
#[post("/users/<uuid>/disable")] #[post("/users/<uuid>/disable")]
async fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn disable_user(uuid: String, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&uuid, &conn).await?; let mut user = get_user_or_404(&uuid, &mut conn).await?;
Device::delete_all_by_user(&user.uuid, &conn).await?; Device::delete_all_by_user(&user.uuid, &mut conn).await?;
user.reset_security_stamp(); user.reset_security_stamp();
user.enabled = false; user.enabled = false;
user.save(&conn).await user.save(&mut conn).await
} }
#[post("/users/<uuid>/enable")] #[post("/users/<uuid>/enable")]
async fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn enable_user(uuid: String, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&uuid, &conn).await?; let mut user = get_user_or_404(&uuid, &mut conn).await?;
user.enabled = true; user.enabled = true;
user.save(&conn).await user.save(&mut conn).await
} }
#[post("/users/<uuid>/remove-2fa")] #[post("/users/<uuid>/remove-2fa")]
async fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn remove_2fa(uuid: String, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&uuid, &conn).await?; let mut user = get_user_or_404(&uuid, &mut conn).await?;
TwoFactor::delete_all_by_user(&user.uuid, &conn).await?; TwoFactor::delete_all_by_user(&user.uuid, &mut conn).await?;
user.totp_recover = None; user.totp_recover = None;
user.save(&conn).await user.save(&mut conn).await
} }
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
@ -417,13 +409,14 @@ struct UserOrgTypeData {
} }
#[post("/users/org_type", data = "<data>")] #[post("/users/org_type", data = "<data>")]
async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let data: UserOrgTypeData = data.into_inner(); let data: UserOrgTypeData = data.into_inner();
let mut user_to_edit = match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn).await { let mut user_to_edit =
Some(user) => user, match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &mut conn).await {
None => err!("The specified user isn't member of the organization"), Some(user) => user,
}; None => err!("The specified user isn't member of the organization"),
};
let new_type = match UserOrgType::from_str(&data.user_type.into_string()) { let new_type = match UserOrgType::from_str(&data.user_type.into_string()) {
Some(new_type) => new_type as i32, Some(new_type) => new_type as i32,
@ -432,7 +425,7 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c
if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner { if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner {
// Removing owner permmission, check that there is at least one other confirmed owner // Removing owner permmission, check that there is at least one other confirmed owner
if UserOrganization::count_confirmed_by_org_and_type(&data.org_uuid, UserOrgType::Owner, &conn).await <= 1 { if UserOrganization::count_confirmed_by_org_and_type(&data.org_uuid, UserOrgType::Owner, &mut conn).await <= 1 {
err!("Can't change the type of the last owner") err!("Can't change the type of the last owner")
} }
} }
@ -440,7 +433,7 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c
// This check is also done at api::organizations::{accept_invite(), _confirm_invite, _activate_user(), edit_user()}, update_user_org_type // This check is also done at api::organizations::{accept_invite(), _confirm_invite, _activate_user(), edit_user()}, update_user_org_type
// It returns different error messages per function. // It returns different error messages per function.
if new_type < UserOrgType::Admin { if new_type < UserOrgType::Admin {
match OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, &user_to_edit.org_uuid, true, &conn).await { match OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, &user_to_edit.org_uuid, true, &mut conn).await {
Ok(_) => {} Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => { Err(OrgPolicyErr::TwoFactorMissing) => {
err!("You cannot modify this user to this type because it has no two-step login method activated"); err!("You cannot modify this user to this type because it has no two-step login method activated");
@ -452,37 +445,34 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c
} }
user_to_edit.atype = new_type; user_to_edit.atype = new_type;
user_to_edit.save(&conn).await user_to_edit.save(&mut conn).await
} }
#[post("/users/update_revision")] #[post("/users/update_revision")]
async fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { async fn update_revision_users(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
User::update_all_revisions(&conn).await User::update_all_revisions(&mut conn).await
} }
#[get("/organizations/overview")] #[get("/organizations/overview")]
async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> { async fn organizations_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
let organizations_json = stream::iter(Organization::get_all(&conn).await) let mut organizations_json = Vec::new();
.then(|o| async { for o in Organization::get_all(&mut conn).await {
let o = o; //Move out this single variable let mut org = o.to_json();
let mut org = o.to_json(); org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &mut conn).await);
org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn).await); org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &mut conn).await);
org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn).await); org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &mut conn).await);
org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn).await); org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &mut conn).await as i32));
org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn).await as i32)); organizations_json.push(org);
org }
})
.collect::<Vec<Value>>()
.await;
let text = AdminTemplateData::with_data("admin/organizations", json!(organizations_json)).render()?; let text = AdminTemplateData::with_data("admin/organizations", json!(organizations_json)).render()?;
Ok(Html(text)) Ok(Html(text))
} }
#[post("/organizations/<uuid>/delete")] #[post("/organizations/<uuid>/delete")]
async fn delete_organization(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { async fn delete_organization(uuid: String, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let org = Organization::find_by_uuid(&uuid, &conn).await.map_res("Organization doesn't exist")?; let org = Organization::find_by_uuid(&uuid, &mut conn).await.map_res("Organization doesn't exist")?;
org.delete(&conn).await org.delete(&mut conn).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -558,7 +548,7 @@ async fn get_release_info(has_http_access: bool, running_within_docker: bool) ->
} }
#[get("/diagnostics")] #[get("/diagnostics")]
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> { async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) -> ApiResult<Html<String>> {
use chrono::prelude::*; use chrono::prelude::*;
use std::net::ToSocketAddrs; use std::net::ToSocketAddrs;
@ -612,7 +602,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
"ip_header_config": &CONFIG.ip_header(), "ip_header_config": &CONFIG.ip_header(),
"uses_proxy": uses_proxy, "uses_proxy": uses_proxy,
"db_type": *DB_TYPE, "db_type": *DB_TYPE,
"db_version": get_sql_server_version(&conn).await, "db_version": get_sql_server_version(&mut conn).await,
"admin_url": format!("{}/diagnostics", admin_url(Referer(None))), "admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
"overrides": &CONFIG.get_overrides().join(", "), "overrides": &CONFIG.get_overrides().join(", "),
"server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(), "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
@ -641,9 +631,9 @@ fn delete_config(_token: AdminToken) -> EmptyResult {
} }
#[post("/config/backup_db")] #[post("/config/backup_db")]
async fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult { async fn backup_db(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
if *CAN_BACKUP { if *CAN_BACKUP {
backup_database(&conn).await backup_database(&mut conn).await
} else { } else {
err!("Can't back up current DB (Only SQLite supports this feature)"); err!("Can't back up current DB (Only SQLite supports this feature)");
} }
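
The handlers above also drop the `futures::{stream, StreamExt}` import and replace the stream combinators with plain `for` loops: a single `&mut conn` cannot be captured by several `.then(..)` futures at once, so the per-user and per-organization queries now run sequentially. A schematic version of that rewrite, with `Conn` and `User` as stand-ins for the real vaultwarden types:

use serde_json::{json, Value};

struct Conn; // stand-in for the pooled database connection

struct User {
    name: String,
}

impl User {
    // As with diesel 2, every call needs exclusive access to the connection.
    async fn get_all(_conn: &mut Conn) -> Vec<User> {
        vec![User { name: "demo".into() }]
    }
    async fn to_json(&self, _conn: &mut Conn) -> Value {
        json!({ "Name": self.name })
    }
}

async fn users_json(conn: &mut Conn) -> Vec<Value> {
    // A stream::iter(..).then(|u| async { u.to_json(&mut conn).await }) pipeline would
    // hand the same &mut conn to every future; a sequential loop keeps exactly one
    // mutable borrow live at a time.
    let mut out = Vec::new();
    for u in User::get_all(conn).await {
        out.push(u.to_json(conn).await);
    }
    out
}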

src/api/core/accounts.rs

@ -81,7 +81,7 @@ fn enforce_password_hint_setting(password_hint: &Option<String>) -> EmptyResult
} }
#[post("/accounts/register", data = "<data>")] #[post("/accounts/register", data = "<data>")]
async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult { async fn register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> JsonResult {
let data: RegisterData = data.into_inner().data; let data: RegisterData = data.into_inner().data;
let email = data.Email.to_lowercase(); let email = data.Email.to_lowercase();
@ -100,7 +100,7 @@ async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
let mut verified_by_invite = false; let mut verified_by_invite = false;
let mut user = match User::find_by_mail(&email, &conn).await { let mut user = match User::find_by_mail(&email, &mut conn).await {
Some(mut user) => { Some(mut user) => {
if !user.password_hash.is_empty() { if !user.password_hash.is_empty() {
err!("Registration not allowed or user already exists") err!("Registration not allowed or user already exists")
@ -116,14 +116,14 @@ async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
} else { } else {
err!("Registration email does not match invite email") err!("Registration email does not match invite email")
} }
} else if Invitation::take(&email, &conn).await { } else if Invitation::take(&email, &mut conn).await {
for mut user_org in UserOrganization::find_invited_by_user(&user.uuid, &conn).await.iter_mut() { for mut user_org in UserOrganization::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() {
user_org.status = UserOrgStatus::Accepted as i32; user_org.status = UserOrgStatus::Accepted as i32;
user_org.save(&conn).await?; user_org.save(&mut conn).await?;
} }
user user
} else if CONFIG.is_signup_allowed(&email) } else if CONFIG.is_signup_allowed(&email)
|| EmergencyAccess::find_invited_by_grantee_email(&email, &conn).await.is_some() || EmergencyAccess::find_invited_by_grantee_email(&email, &mut conn).await.is_some()
{ {
user user
} else { } else {
@ -134,7 +134,7 @@ async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
// Order is important here; the invitation check must come first // Order is important here; the invitation check must come first
// because the vaultwarden admin can invite anyone, regardless // because the vaultwarden admin can invite anyone, regardless
// of other signup restrictions. // of other signup restrictions.
if Invitation::take(&email, &conn).await || CONFIG.is_signup_allowed(&email) { if Invitation::take(&email, &mut conn).await || CONFIG.is_signup_allowed(&email) {
User::new(email.clone()) User::new(email.clone())
} else { } else {
err!("Registration not allowed or user already exists") err!("Registration not allowed or user already exists")
@ -143,7 +143,7 @@ async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
}; };
// Make sure we don't leave a lingering invitation. // Make sure we don't leave a lingering invitation.
Invitation::take(&email, &conn).await; Invitation::take(&email, &mut conn).await;
if let Some(client_kdf_iter) = data.KdfIterations { if let Some(client_kdf_iter) = data.KdfIterations {
user.client_kdf_iter = client_kdf_iter; user.client_kdf_iter = client_kdf_iter;
@ -179,7 +179,7 @@ async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
} }
} }
user.save(&conn).await?; user.save(&mut conn).await?;
Ok(Json(json!({ Ok(Json(json!({
"Object": "register", "Object": "register",
"CaptchaBypassToken": "", "CaptchaBypassToken": "",
@ -187,8 +187,8 @@ async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
} }
#[get("/accounts/profile")] #[get("/accounts/profile")]
async fn profile(headers: Headers, conn: DbConn) -> Json<Value> { async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
Json(headers.user.to_json(&conn).await) Json(headers.user.to_json(&mut conn).await)
} }
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
@ -205,7 +205,7 @@ async fn put_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbCo
} }
#[post("/accounts/profile", data = "<data>")] #[post("/accounts/profile", data = "<data>")]
async fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult { async fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: ProfileData = data.into_inner().data; let data: ProfileData = data.into_inner().data;
// Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden)
@ -217,13 +217,13 @@ async fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbC
let mut user = headers.user; let mut user = headers.user;
user.name = data.Name; user.name = data.Name;
user.save(&conn).await?; user.save(&mut conn).await?;
Ok(Json(user.to_json(&conn).await)) Ok(Json(user.to_json(&mut conn).await))
} }
#[get("/users/<uuid>/public-key")] #[get("/users/<uuid>/public-key")]
async fn get_public_keys(uuid: String, _headers: Headers, conn: DbConn) -> JsonResult { async fn get_public_keys(uuid: String, _headers: Headers, mut conn: DbConn) -> JsonResult {
let user = match User::find_by_uuid(&uuid, &conn).await { let user = match User::find_by_uuid(&uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("User doesn't exist"), None => err!("User doesn't exist"),
}; };
@ -236,7 +236,7 @@ async fn get_public_keys(uuid: String, _headers: Headers, conn: DbConn) -> JsonR
} }
#[post("/accounts/keys", data = "<data>")] #[post("/accounts/keys", data = "<data>")]
async fn post_keys(data: JsonUpcase<KeysData>, headers: Headers, conn: DbConn) -> JsonResult { async fn post_keys(data: JsonUpcase<KeysData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: KeysData = data.into_inner().data; let data: KeysData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -244,7 +244,7 @@ async fn post_keys(data: JsonUpcase<KeysData>, headers: Headers, conn: DbConn) -
user.private_key = Some(data.EncryptedPrivateKey); user.private_key = Some(data.EncryptedPrivateKey);
user.public_key = Some(data.PublicKey); user.public_key = Some(data.PublicKey);
user.save(&conn).await?; user.save(&mut conn).await?;
Ok(Json(json!({ Ok(Json(json!({
"PrivateKey": user.private_key, "PrivateKey": user.private_key,
@ -263,7 +263,7 @@ struct ChangePassData {
} }
#[post("/accounts/password", data = "<data>")] #[post("/accounts/password", data = "<data>")]
async fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: ChangePassData = data.into_inner().data; let data: ChangePassData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -279,7 +279,7 @@ async fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, conn:
Some(vec![String::from("post_rotatekey"), String::from("get_contacts"), String::from("get_public_keys")]), Some(vec![String::from("post_rotatekey"), String::from("get_contacts"), String::from("get_public_keys")]),
); );
user.akey = data.Key; user.akey = data.Key;
user.save(&conn).await user.save(&mut conn).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -294,7 +294,7 @@ struct ChangeKdfData {
} }
#[post("/accounts/kdf", data = "<data>")] #[post("/accounts/kdf", data = "<data>")]
async fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: ChangeKdfData = data.into_inner().data; let data: ChangeKdfData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -306,7 +306,7 @@ async fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, conn: DbCon
user.client_kdf_type = data.Kdf; user.client_kdf_type = data.Kdf;
user.set_password(&data.NewMasterPasswordHash, None); user.set_password(&data.NewMasterPasswordHash, None);
user.akey = data.Key; user.akey = data.Key;
user.save(&conn).await user.save(&mut conn).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -329,7 +329,7 @@ struct KeyData {
} }
#[post("/accounts/key", data = "<data>")] #[post("/accounts/key", data = "<data>")]
async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let data: KeyData = data.into_inner().data; let data: KeyData = data.into_inner().data;
if !headers.user.check_valid_password(&data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.MasterPasswordHash) {
@ -340,7 +340,7 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbCon
// Update folder data // Update folder data
for folder_data in data.Folders { for folder_data in data.Folders {
let mut saved_folder = match Folder::find_by_uuid(&folder_data.Id, &conn).await { let mut saved_folder = match Folder::find_by_uuid(&folder_data.Id, &mut conn).await {
Some(folder) => folder, Some(folder) => folder,
None => err!("Folder doesn't exist"), None => err!("Folder doesn't exist"),
}; };
@ -350,14 +350,14 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbCon
} }
saved_folder.name = folder_data.Name; saved_folder.name = folder_data.Name;
saved_folder.save(&conn).await? saved_folder.save(&mut conn).await?
} }
// Update cipher data // Update cipher data
use super::ciphers::update_cipher_from_data; use super::ciphers::update_cipher_from_data;
for cipher_data in data.Ciphers { for cipher_data in data.Ciphers {
let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &conn).await { let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
@ -368,7 +368,8 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbCon
// Prevent triggering cipher updates via WebSockets by settings UpdateType::None // Prevent triggering cipher updates via WebSockets by settings UpdateType::None
// The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues. // The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues.
-        update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None).await?
+        update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &mut conn, &nt, UpdateType::None)
+            .await?
} }
// Update user data // Update user data
@ -378,11 +379,11 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbCon
user.private_key = Some(data.PrivateKey); user.private_key = Some(data.PrivateKey);
user.reset_security_stamp(); user.reset_security_stamp();
user.save(&conn).await user.save(&mut conn).await
} }
#[post("/accounts/security-stamp", data = "<data>")] #[post("/accounts/security-stamp", data = "<data>")]
async fn post_sstamp(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn post_sstamp(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -390,9 +391,9 @@ async fn post_sstamp(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbC
err!("Invalid password") err!("Invalid password")
} }
Device::delete_all_by_user(&user.uuid, &conn).await?; Device::delete_all_by_user(&user.uuid, &mut conn).await?;
user.reset_security_stamp(); user.reset_security_stamp();
user.save(&conn).await user.save(&mut conn).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -403,7 +404,7 @@ struct EmailTokenData {
} }
#[post("/accounts/email-token", data = "<data>")] #[post("/accounts/email-token", data = "<data>")]
async fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: EmailTokenData = data.into_inner().data; let data: EmailTokenData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -411,7 +412,7 @@ async fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, co
err!("Invalid password") err!("Invalid password")
} }
if User::find_by_mail(&data.NewEmail, &conn).await.is_some() { if User::find_by_mail(&data.NewEmail, &mut conn).await.is_some() {
err!("Email already in use"); err!("Email already in use");
} }
@ -429,7 +430,7 @@ async fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, co
user.email_new = Some(data.NewEmail); user.email_new = Some(data.NewEmail);
user.email_new_token = Some(token); user.email_new_token = Some(token);
user.save(&conn).await user.save(&mut conn).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -444,7 +445,7 @@ struct ChangeEmailData {
} }
#[post("/accounts/email", data = "<data>")] #[post("/accounts/email", data = "<data>")]
async fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: ChangeEmailData = data.into_inner().data; let data: ChangeEmailData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -452,7 +453,7 @@ async fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: D
err!("Invalid password") err!("Invalid password")
} }
if User::find_by_mail(&data.NewEmail, &conn).await.is_some() { if User::find_by_mail(&data.NewEmail, &mut conn).await.is_some() {
err!("Email already in use"); err!("Email already in use");
} }
@ -487,7 +488,7 @@ async fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: D
user.set_password(&data.NewMasterPasswordHash, None); user.set_password(&data.NewMasterPasswordHash, None);
user.akey = data.Key; user.akey = data.Key;
user.save(&conn).await user.save(&mut conn).await
} }
#[post("/accounts/verify-email")] #[post("/accounts/verify-email")]
@ -513,10 +514,10 @@ struct VerifyEmailTokenData {
} }
#[post("/accounts/verify-email-token", data = "<data>")] #[post("/accounts/verify-email-token", data = "<data>")]
async fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, conn: DbConn) -> EmptyResult { async fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, mut conn: DbConn) -> EmptyResult {
let data: VerifyEmailTokenData = data.into_inner().data; let data: VerifyEmailTokenData = data.into_inner().data;
let mut user = match User::find_by_uuid(&data.UserId, &conn).await { let mut user = match User::find_by_uuid(&data.UserId, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("User doesn't exist"), None => err!("User doesn't exist"),
}; };
@ -531,7 +532,7 @@ async fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, conn: D
user.verified_at = Some(Utc::now().naive_utc()); user.verified_at = Some(Utc::now().naive_utc());
user.last_verifying_at = None; user.last_verifying_at = None;
user.login_verify_count = 0; user.login_verify_count = 0;
if let Err(e) = user.save(&conn).await { if let Err(e) = user.save(&mut conn).await {
error!("Error saving email verification: {:#?}", e); error!("Error saving email verification: {:#?}", e);
} }
@ -545,11 +546,11 @@ struct DeleteRecoverData {
} }
#[post("/accounts/delete-recover", data = "<data>")] #[post("/accounts/delete-recover", data = "<data>")]
async fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, conn: DbConn) -> EmptyResult { async fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, mut conn: DbConn) -> EmptyResult {
let data: DeleteRecoverData = data.into_inner().data; let data: DeleteRecoverData = data.into_inner().data;
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
if let Some(user) = User::find_by_mail(&data.Email, &conn).await { if let Some(user) = User::find_by_mail(&data.Email, &mut conn).await {
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await { if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
error!("Error sending delete account email: {:#?}", e); error!("Error sending delete account email: {:#?}", e);
} }
@ -572,10 +573,10 @@ struct DeleteRecoverTokenData {
} }
#[post("/accounts/delete-recover-token", data = "<data>")] #[post("/accounts/delete-recover-token", data = "<data>")]
async fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult { async fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, mut conn: DbConn) -> EmptyResult {
let data: DeleteRecoverTokenData = data.into_inner().data; let data: DeleteRecoverTokenData = data.into_inner().data;
let user = match User::find_by_uuid(&data.UserId, &conn).await { let user = match User::find_by_uuid(&data.UserId, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("User doesn't exist"), None => err!("User doesn't exist"),
}; };
@ -587,7 +588,7 @@ async fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, con
if claims.sub != user.uuid { if claims.sub != user.uuid {
err!("Invalid claim"); err!("Invalid claim");
} }
user.delete(&conn).await user.delete(&mut conn).await
} }
#[post("/accounts/delete", data = "<data>")] #[post("/accounts/delete", data = "<data>")]
@ -596,7 +597,7 @@ async fn post_delete_account(data: JsonUpcase<PasswordData>, headers: Headers, c
} }
#[delete("/accounts", data = "<data>")] #[delete("/accounts", data = "<data>")]
async fn delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn delete_account(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
let user = headers.user; let user = headers.user;
@ -604,7 +605,7 @@ async fn delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn:
err!("Invalid password") err!("Invalid password")
} }
user.delete(&conn).await user.delete(&mut conn).await
} }
#[get("/accounts/revision-date")] #[get("/accounts/revision-date")]
@ -620,7 +621,7 @@ struct PasswordHintData {
} }
#[post("/accounts/password-hint", data = "<data>")] #[post("/accounts/password-hint", data = "<data>")]
async fn password_hint(data: JsonUpcase<PasswordHintData>, conn: DbConn) -> EmptyResult { async fn password_hint(data: JsonUpcase<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() { if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() {
err!("This server is not configured to provide password hints."); err!("This server is not configured to provide password hints.");
} }
@ -630,7 +631,7 @@ async fn password_hint(data: JsonUpcase<PasswordHintData>, conn: DbConn) -> Empt
let data: PasswordHintData = data.into_inner().data; let data: PasswordHintData = data.into_inner().data;
let email = &data.Email; let email = &data.Email;
match User::find_by_mail(email, &conn).await { match User::find_by_mail(email, &mut conn).await {
None => { None => {
// To prevent user enumeration, act as if the user exists. // To prevent user enumeration, act as if the user exists.
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
@ -672,10 +673,10 @@ async fn prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> Json<Value> {
_prelogin(data, conn).await _prelogin(data, conn).await
} }
pub async fn _prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> Json<Value> { pub async fn _prelogin(data: JsonUpcase<PreloginData>, mut conn: DbConn) -> Json<Value> {
let data: PreloginData = data.into_inner().data; let data: PreloginData = data.into_inner().data;
let (kdf_type, kdf_iter) = match User::find_by_mail(&data.Email, &conn).await { let (kdf_type, kdf_iter) = match User::find_by_mail(&data.Email, &mut conn).await {
Some(user) => (user.client_kdf_type, user.client_kdf_iter), Some(user) => (user.client_kdf_type, user.client_kdf_iter),
None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT), None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT),
}; };
@ -709,7 +710,7 @@ async fn _api_key(
data: JsonUpcase<SecretVerificationRequest>, data: JsonUpcase<SecretVerificationRequest>,
rotate: bool, rotate: bool,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let data: SecretVerificationRequest = data.into_inner().data; let data: SecretVerificationRequest = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -720,7 +721,7 @@ async fn _api_key(
if rotate || user.api_key.is_none() { if rotate || user.api_key.is_none() {
user.api_key = Some(crypto::generate_api_key()); user.api_key = Some(crypto::generate_api_key());
user.save(&conn).await.expect("Error saving API key"); user.save(&mut conn).await.expect("Error saving API key");
} }
Ok(Json(json!({ Ok(Json(json!({

View File

@ -1,7 +1,6 @@
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use futures::{stream, stream::StreamExt};
use rocket::fs::TempFile; use rocket::fs::TempFile;
use rocket::serde::json::Json; use rocket::serde::json::Json;
use rocket::{ use rocket::{
@ -85,8 +84,8 @@ pub fn routes() -> Vec<Route> {
pub async fn purge_trashed_ciphers(pool: DbPool) { pub async fn purge_trashed_ciphers(pool: DbPool) {
debug!("Purging trashed ciphers"); debug!("Purging trashed ciphers");
if let Ok(conn) = pool.get().await { if let Ok(mut conn) = pool.get().await {
Cipher::purge_trash(&conn).await; Cipher::purge_trash(&mut conn).await;
} else { } else {
error!("Failed to get DB connection while purging trashed ciphers") error!("Failed to get DB connection while purging trashed ciphers")
} }
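The background purge task follows the same rule when it checks a connection out of the pool: bind it mutably, then pass &mut to the query helper. A rough synchronous sketch with stand-in pool and connection types (the real code is async):

// Stand-ins only; the real task runs on the Rocket/diesel async pool.
struct DbConn;
struct DbPool;

impl DbPool {
    fn get(&self) -> Result<DbConn, String> {
        Ok(DbConn)
    }
}

fn purge_trash(_conn: &mut DbConn) {
    // would delete expired, soft-deleted ciphers here
}

fn purge_trashed_ciphers_like(pool: DbPool) {
    match pool.get() {
        Ok(mut conn) => purge_trash(&mut conn),
        Err(e) => eprintln!("Failed to get DB connection while purging trashed ciphers: {e}"),
    }
}

fn main() {
    purge_trashed_ciphers_like(DbPool);
}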
@ -99,39 +98,33 @@ struct SyncData {
} }
#[get("/sync?<data..>")] #[get("/sync?<data..>")]
async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json<Value> { async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json<Value> {
let user_json = headers.user.to_json(&conn).await; let user_json = headers.user.to_json(&mut conn).await;
// Get all ciphers which are visible by the user // Get all ciphers which are visible by the user
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await; let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await;
let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::User, &conn).await; let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::User, &mut conn).await;
// Lets generate the ciphers_json using all the gathered info // Lets generate the ciphers_json using all the gathered info
-    let ciphers_json: Vec<Value> = stream::iter(ciphers)
-        .then(|c| async {
-            let c = c; // Move out this single variable
-            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
-        })
-        .collect()
-        .await;
+    let mut ciphers_json = Vec::new();
+    for c in ciphers {
+        ciphers_json.push(c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &mut conn).await);
+    }

-    let collections_json: Vec<Value> = stream::iter(Collection::find_by_user_uuid(&headers.user.uuid, &conn).await)
-        .then(|c| async {
-            let c = c; // Move out this single variable
-            c.to_json_details(&headers.user.uuid, Some(&cipher_sync_data), &conn).await
-        })
-        .collect()
-        .await;
+    let mut collections_json = Vec::new();
+    for c in Collection::find_by_user_uuid(headers.user.uuid.clone(), &mut conn).await {
+        collections_json.push(c.to_json_details(&headers.user.uuid, Some(&cipher_sync_data), &mut conn).await);
+    }
let folders_json: Vec<Value> = let folders_json: Vec<Value> =
Folder::find_by_user(&headers.user.uuid, &conn).await.iter().map(Folder::to_json).collect(); Folder::find_by_user(&headers.user.uuid, &mut conn).await.iter().map(Folder::to_json).collect();
let sends_json: Vec<Value> = let sends_json: Vec<Value> =
Send::find_by_user(&headers.user.uuid, &conn).await.iter().map(Send::to_json).collect(); Send::find_by_user(&headers.user.uuid, &mut conn).await.iter().map(Send::to_json).collect();
let policies_json: Vec<Value> = let policies_json: Vec<Value> =
OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn).await.iter().map(OrgPolicy::to_json).collect(); OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &mut conn).await.iter().map(OrgPolicy::to_json).collect();
let domains_json = if data.exclude_domains { let domains_json = if data.exclude_domains {
Value::Null Value::Null
@ -153,17 +146,14 @@ async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json<Value> {
} }
#[get("/ciphers")] #[get("/ciphers")]
async fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> { async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await; let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await;
let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::User, &conn).await; let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::User, &mut conn).await;
-    let ciphers_json = stream::iter(ciphers)
-        .then(|c| async {
-            let c = c; // Move out this single variable
-            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
-        })
-        .collect::<Vec<Value>>()
-        .await;
+    let mut ciphers_json = Vec::new();
+    for c in ciphers {
+        ciphers_json.push(c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &mut conn).await);
+    }
Json(json!({ Json(json!({
"Data": ciphers_json, "Data": ciphers_json,
@ -173,17 +163,17 @@ async fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
} }
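The loops above replace the earlier stream::iter(...).then(...).collect().await chains: a stream adaptor would have to capture the connection inside several pending futures, while a sequential for loop awaits one call at a time and can re-borrow a single &mut connection on each iteration. A rough sketch of the resulting shape, assuming the futures crate is available for block_on and using stand-in types:

use futures::executor::block_on; // assumed dependency, for the example only

struct DbConn;

struct Cipher {
    id: u32,
}

impl Cipher {
    // Stand-in for the real to_json, which queries the database.
    async fn to_json(&self, _conn: &mut DbConn) -> String {
        format!("{{\"Id\":{}}}", self.id)
    }
}

async fn ciphers_to_json(ciphers: Vec<Cipher>, conn: &mut DbConn) -> Vec<String> {
    let mut out = Vec::new();
    for c in ciphers {
        // Each iteration re-borrows `conn`; only one future is alive at a time.
        out.push(c.to_json(conn).await);
    }
    out
}

fn main() {
    let mut conn = DbConn;
    let json = block_on(ciphers_to_json(vec![Cipher { id: 1 }, Cipher { id: 2 }], &mut conn));
    assert_eq!(json.len(), 2);
}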
#[get("/ciphers/<uuid>")] #[get("/ciphers/<uuid>")]
async fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { async fn get_cipher(uuid: String, headers: Headers, mut conn: DbConn) -> JsonResult {
let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_accessible_to_user(&headers.user.uuid, &conn).await { if !cipher.is_accessible_to_user(&headers.user.uuid, &mut conn).await {
err!("Cipher is not owned by user") err!("Cipher is not owned by user")
} }
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &mut conn).await))
} }
#[get("/ciphers/<uuid>/admin")] #[get("/ciphers/<uuid>/admin")]
@ -269,7 +259,7 @@ async fn post_ciphers_admin(
async fn post_ciphers_create( async fn post_ciphers_create(
data: JsonUpcase<ShareCipherData>, data: JsonUpcase<ShareCipherData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let mut data: ShareCipherData = data.into_inner().data; let mut data: ShareCipherData = data.into_inner().data;
@ -283,11 +273,11 @@ async fn post_ciphers_create(
// This check is usually only needed in update_cipher_from_data(), but we // This check is usually only needed in update_cipher_from_data(), but we
// need it here as well to avoid creating an empty cipher in the call to // need it here as well to avoid creating an empty cipher in the call to
// cipher.save() below. // cipher.save() below.
enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &conn).await?; enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &mut conn).await?;
let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone()); let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone());
cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.user_uuid = Some(headers.user.uuid.clone());
cipher.save(&conn).await?; cipher.save(&mut conn).await?;
// When cloning a cipher, the Bitwarden clients seem to set this field // When cloning a cipher, the Bitwarden clients seem to set this field
// based on the cipher being cloned (when creating a new cipher, it's set // based on the cipher being cloned (when creating a new cipher, it's set
@ -297,12 +287,12 @@ async fn post_ciphers_create(
// or otherwise), we can just ignore this field entirely. // or otherwise), we can just ignore this field entirely.
data.Cipher.LastKnownRevisionDate = None; data.Cipher.LastKnownRevisionDate = None;
share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt).await share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await
} }
/// Called when creating a new user-owned cipher. /// Called when creating a new user-owned cipher.
#[post("/ciphers", data = "<data>")] #[post("/ciphers", data = "<data>")]
async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
let mut data: CipherData = data.into_inner().data; let mut data: CipherData = data.into_inner().data;
// The web/browser clients set this field to null as expected, but the // The web/browser clients set this field to null as expected, but the
@ -312,9 +302,9 @@ async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbCo
data.LastKnownRevisionDate = None; data.LastKnownRevisionDate = None;
let mut cipher = Cipher::new(data.Type, data.Name.clone()); let mut cipher = Cipher::new(data.Type, data.Name.clone());
update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate).await?; update_cipher_from_data(&mut cipher, data, &headers, false, &mut conn, &nt, UpdateType::CipherCreate).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &mut conn).await))
} }
/// Enforces the personal ownership policy on user-owned ciphers, if applicable. /// Enforces the personal ownership policy on user-owned ciphers, if applicable.
@ -324,7 +314,11 @@ async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbCo
/// allowed to delete or share such ciphers to an org, however. /// allowed to delete or share such ciphers to an org, however.
/// ///
/// Ref: https://bitwarden.com/help/article/policies/#personal-ownership /// Ref: https://bitwarden.com/help/article/policies/#personal-ownership
-async fn enforce_personal_ownership_policy(data: Option<&CipherData>, headers: &Headers, conn: &DbConn) -> EmptyResult {
+async fn enforce_personal_ownership_policy(
+    data: Option<&CipherData>,
+    headers: &Headers,
+    conn: &mut DbConn,
+) -> EmptyResult {
if data.is_none() || data.unwrap().OrganizationId.is_none() { if data.is_none() || data.unwrap().OrganizationId.is_none() {
let user_uuid = &headers.user.uuid; let user_uuid = &headers.user.uuid;
let policy_type = OrgPolicyType::PersonalOwnership; let policy_type = OrgPolicyType::PersonalOwnership;
@ -340,7 +334,7 @@ pub async fn update_cipher_from_data(
data: CipherData, data: CipherData,
headers: &Headers, headers: &Headers,
shared_to_collection: bool, shared_to_collection: bool,
conn: &DbConn, conn: &mut DbConn,
nt: &Notify<'_>, nt: &Notify<'_>,
ut: UpdateType, ut: UpdateType,
) -> EmptyResult { ) -> EmptyResult {
@ -493,10 +487,10 @@ struct RelationsData {
async fn post_ciphers_import( async fn post_ciphers_import(
data: JsonUpcase<ImportData>, data: JsonUpcase<ImportData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
enforce_personal_ownership_policy(None, &headers, &conn).await?; enforce_personal_ownership_policy(None, &headers, &mut conn).await?;
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner().data;
@ -504,7 +498,7 @@ async fn post_ciphers_import(
let mut folders: Vec<_> = Vec::new(); let mut folders: Vec<_> = Vec::new();
for folder in data.Folders.into_iter() { for folder in data.Folders.into_iter() {
let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.Name); let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.Name);
new_folder.save(&conn).await?; new_folder.save(&mut conn).await?;
folders.push(new_folder); folders.push(new_folder);
} }
@ -522,11 +516,11 @@ async fn post_ciphers_import(
cipher_data.FolderId = folder_uuid; cipher_data.FolderId = folder_uuid;
let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None).await?; update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &mut conn, &nt, UpdateType::None).await?;
} }
let mut user = headers.user; let mut user = headers.user;
user.update_revision(&conn).await?; user.update_revision(&mut conn).await?;
nt.send_user_update(UpdateType::Vault, &user).await; nt.send_user_update(UpdateType::Vault, &user).await;
Ok(()) Ok(())
} }
@ -570,12 +564,12 @@ async fn put_cipher(
uuid: String, uuid: String,
data: JsonUpcase<CipherData>, data: JsonUpcase<CipherData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: CipherData = data.into_inner().data; let data: CipherData = data.into_inner().data;
let mut cipher = match Cipher::find_by_uuid(&uuid, &conn).await { let mut cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
@ -585,13 +579,13 @@ async fn put_cipher(
// cipher itself, so the user shouldn't need write access to change these. // cipher itself, so the user shouldn't need write access to change these.
// Interestingly, upstream Bitwarden doesn't properly handle this either. // Interestingly, upstream Bitwarden doesn't properly handle this either.
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate).await?; update_cipher_from_data(&mut cipher, data, &headers, false, &mut conn, &nt, UpdateType::CipherUpdate).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &mut conn).await))
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -635,34 +629,34 @@ async fn post_collections_admin(
uuid: String, uuid: String,
data: JsonUpcase<CollectionsAdminData>, data: JsonUpcase<CollectionsAdminData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
) -> EmptyResult { ) -> EmptyResult {
let data: CollectionsAdminData = data.into_inner().data; let data: CollectionsAdminData = data.into_inner().data;
let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect(); let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect();
let current_collections: HashSet<String> = let current_collections: HashSet<String> =
cipher.get_collections(&headers.user.uuid, &conn).await.iter().cloned().collect(); cipher.get_collections(headers.user.uuid.clone(), &mut conn).await.iter().cloned().collect();
for collection in posted_collections.symmetric_difference(&current_collections) { for collection in posted_collections.symmetric_difference(&current_collections) {
match Collection::find_by_uuid(collection, &conn).await { match Collection::find_by_uuid(collection, &mut conn).await {
None => err!("Invalid collection ID provided"), None => err!("Invalid collection ID provided"),
Some(collection) => { Some(collection) => {
if collection.is_writable_by_user(&headers.user.uuid, &conn).await { if collection.is_writable_by_user(&headers.user.uuid, &mut conn).await {
if posted_collections.contains(&collection.uuid) { if posted_collections.contains(&collection.uuid) {
// Add to collection // Add to collection
CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn).await?; CollectionCipher::save(&cipher.uuid, &collection.uuid, &mut conn).await?;
} else { } else {
// Remove from collection // Remove from collection
CollectionCipher::delete(&cipher.uuid, &collection.uuid, &conn).await?; CollectionCipher::delete(&cipher.uuid, &collection.uuid, &mut conn).await?;
} }
} else { } else {
err!("No rights to modify the collection") err!("No rights to modify the collection")
@ -686,12 +680,12 @@ async fn post_cipher_share(
uuid: String, uuid: String,
data: JsonUpcase<ShareCipherData>, data: JsonUpcase<ShareCipherData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner().data;
share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt).await share_cipher_by_uuid(&uuid, data, &headers, &mut conn, &nt).await
} }
#[put("/ciphers/<uuid>/share", data = "<data>")] #[put("/ciphers/<uuid>/share", data = "<data>")]
@ -699,12 +693,12 @@ async fn put_cipher_share(
uuid: String, uuid: String,
data: JsonUpcase<ShareCipherData>, data: JsonUpcase<ShareCipherData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner().data;
share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt).await share_cipher_by_uuid(&uuid, data, &headers, &mut conn, &nt).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -718,7 +712,7 @@ struct ShareSelectedCipherData {
async fn put_cipher_share_selected( async fn put_cipher_share_selected(
data: JsonUpcase<ShareSelectedCipherData>, data: JsonUpcase<ShareSelectedCipherData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let mut data: ShareSelectedCipherData = data.into_inner().data; let mut data: ShareSelectedCipherData = data.into_inner().data;
@ -746,7 +740,7 @@ async fn put_cipher_share_selected(
}; };
match shared_cipher_data.Cipher.Id.take() { match shared_cipher_data.Cipher.Id.take() {
Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &conn, &nt).await?, Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?,
None => err!("Request missing ids field"), None => err!("Request missing ids field"),
}; };
} }
@ -758,7 +752,7 @@ async fn share_cipher_by_uuid(
uuid: &str, uuid: &str,
data: ShareCipherData, data: ShareCipherData,
headers: &Headers, headers: &Headers,
conn: &DbConn, conn: &mut DbConn,
nt: &Notify<'_>, nt: &Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let mut cipher = match Cipher::find_by_uuid(uuid, conn).await { let mut cipher = match Cipher::find_by_uuid(uuid, conn).await {
@ -816,8 +810,8 @@ async fn share_cipher_by_uuid(
/// their object storage service. For self-hosted instances, it basically just /// their object storage service. For self-hosted instances, it basically just
/// redirects to the same location as before the v2 API. /// redirects to the same location as before the v2 API.
#[get("/ciphers/<uuid>/attachment/<attachment_id>")] #[get("/ciphers/<uuid>/attachment/<attachment_id>")]
async fn get_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn get_attachment(uuid: String, attachment_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
match Attachment::find_by_id(&attachment_id, &conn).await { match Attachment::find_by_id(&attachment_id, &mut conn).await {
Some(attachment) if uuid == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))), Some(attachment) if uuid == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))),
Some(_) => err!("Attachment doesn't belong to cipher"), Some(_) => err!("Attachment doesn't belong to cipher"),
None => err!("Attachment doesn't exist"), None => err!("Attachment doesn't exist"),
@ -847,14 +841,14 @@ async fn post_attachment_v2(
uuid: String, uuid: String,
data: JsonUpcase<AttachmentRequestData>, data: JsonUpcase<AttachmentRequestData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
@ -862,7 +856,7 @@ async fn post_attachment_v2(
let data: AttachmentRequestData = data.into_inner().data; let data: AttachmentRequestData = data.into_inner().data;
let attachment = let attachment =
Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, data.FileSize, Some(data.Key)); Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, data.FileSize, Some(data.Key));
attachment.save(&conn).await.expect("Error saving attachment"); attachment.save(&mut conn).await.expect("Error saving attachment");
let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id); let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
let response_key = match data.AdminRequest { let response_key = match data.AdminRequest {
@ -875,7 +869,7 @@ async fn post_attachment_v2(
"AttachmentId": attachment_id, "AttachmentId": attachment_id,
"Url": url, "Url": url,
"FileUploadType": FileUploadType::Direct as i32, "FileUploadType": FileUploadType::Direct as i32,
response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await, response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, &mut conn).await,
}))) })))
} }
@ -898,15 +892,15 @@ async fn save_attachment(
cipher_uuid: String, cipher_uuid: String,
data: Form<UploadData<'_>>, data: Form<UploadData<'_>>,
headers: &Headers, headers: &Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> Result<(Cipher, DbConn), crate::error::Error> { ) -> Result<(Cipher, DbConn), crate::error::Error> {
let cipher = match Cipher::find_by_uuid(&cipher_uuid, &conn).await { let cipher = match Cipher::find_by_uuid(&cipher_uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
@ -921,7 +915,7 @@ async fn save_attachment(
match CONFIG.user_attachment_limit() { match CONFIG.user_attachment_limit() {
Some(0) => err!("Attachments are disabled"), Some(0) => err!("Attachments are disabled"),
Some(limit_kb) => { Some(limit_kb) => {
let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, &conn).await + size_adjust; let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, &mut conn).await + size_adjust;
if left <= 0 { if left <= 0 {
err!("Attachment storage limit reached! Delete some attachments to free up space") err!("Attachment storage limit reached! Delete some attachments to free up space")
} }
@ -933,7 +927,7 @@ async fn save_attachment(
match CONFIG.org_attachment_limit() { match CONFIG.org_attachment_limit() {
Some(0) => err!("Attachments are disabled"), Some(0) => err!("Attachments are disabled"),
Some(limit_kb) => { Some(limit_kb) => {
let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, &conn).await + size_adjust; let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, &mut conn).await + size_adjust;
if left <= 0 { if left <= 0 {
err!("Attachment storage limit reached! Delete some attachments to free up space") err!("Attachment storage limit reached! Delete some attachments to free up space")
} }
@ -990,10 +984,10 @@ async fn save_attachment(
if size != attachment.file_size { if size != attachment.file_size {
// Update the attachment with the actual file size. // Update the attachment with the actual file size.
attachment.file_size = size; attachment.file_size = size;
attachment.save(&conn).await.expect("Error updating attachment"); attachment.save(&mut conn).await.expect("Error updating attachment");
} }
} else { } else {
attachment.delete(&conn).await.ok(); attachment.delete(&mut conn).await.ok();
err!(format!("Attachment size mismatch (expected within [{}, {}], got {})", min_size, max_size, size)); err!(format!("Attachment size mismatch (expected within [{}, {}], got {})", min_size, max_size, size));
} }
@ -1008,14 +1002,14 @@ async fn save_attachment(
err!("No attachment key provided") err!("No attachment key provided")
} }
let attachment = Attachment::new(file_id, cipher_uuid.clone(), encrypted_filename.unwrap(), size, data.key); let attachment = Attachment::new(file_id, cipher_uuid.clone(), encrypted_filename.unwrap(), size, data.key);
attachment.save(&conn).await.expect("Error saving attachment"); attachment.save(&mut conn).await.expect("Error saving attachment");
} }
if let Err(_err) = data.data.persist_to(&file_path).await { if let Err(_err) = data.data.persist_to(&file_path).await {
data.data.move_copy_to(file_path).await? data.data.move_copy_to(file_path).await?
} }
nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn).await).await; nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&mut conn).await).await;
Ok((cipher, conn)) Ok((cipher, conn))
} }
@ -1030,10 +1024,10 @@ async fn post_attachment_v2_data(
attachment_id: String, attachment_id: String,
data: Form<UploadData<'_>>, data: Form<UploadData<'_>>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let attachment = match Attachment::find_by_id(&attachment_id, &conn).await { let attachment = match Attachment::find_by_id(&attachment_id, &mut conn).await {
Some(attachment) if uuid == attachment.cipher_uuid => Some(attachment), Some(attachment) if uuid == attachment.cipher_uuid => Some(attachment),
Some(_) => err!("Attachment doesn't belong to cipher"), Some(_) => err!("Attachment doesn't belong to cipher"),
None => err!("Attachment doesn't exist"), None => err!("Attachment doesn't exist"),
@ -1057,9 +1051,9 @@ async fn post_attachment(
// the attachment database record as well as saving the data to disk. // the attachment database record as well as saving the data to disk.
let attachment = None; let attachment = None;
let (cipher, conn) = save_attachment(attachment, uuid, data, &headers, conn, nt).await?; let (cipher, mut conn) = save_attachment(attachment, uuid, data, &headers, conn, nt).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &mut conn).await))
} }
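save_attachment now takes the connection by value and returns it in its result tuple, so a caller such as post_attachment can destructure let (cipher, mut conn) = ... and keep querying afterwards. A small ownership sketch with stand-in types:

// Stand-ins only: shows the hand-the-connection-back pattern.
struct DbConn;

struct Cipher {
    name: String,
}

fn save_attachment_like(conn: DbConn, name: &str) -> Result<(Cipher, DbConn), String> {
    // would validate size limits and persist the file here
    Ok((Cipher { name: name.to_string() }, conn)) // give the connection back to the caller
}

fn main() {
    let (cipher, mut conn) = save_attachment_like(DbConn, "report.pdf").unwrap();
    let _ = &mut conn; // caller can still borrow it mutably for the follow-up to_json query
    println!("saved attachment on cipher {}", cipher.name);
}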
#[post("/ciphers/<uuid>/attachment-admin", format = "multipart/form-data", data = "<data>")] #[post("/ciphers/<uuid>/attachment-admin", format = "multipart/form-data", data = "<data>")]
@ -1079,10 +1073,10 @@ async fn post_attachment_share(
attachment_id: String, attachment_id: String,
data: Form<UploadData<'_>>, data: Form<UploadData<'_>>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt).await?; _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &mut conn, &nt).await?;
post_attachment(uuid, data, headers, conn, nt).await post_attachment(uuid, data, headers, conn, nt).await
} }
@ -1113,10 +1107,10 @@ async fn delete_attachment(
uuid: String, uuid: String,
attachment_id: String, attachment_id: String,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt).await _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &mut conn, &nt).await
} }
#[delete("/ciphers/<uuid>/attachment/<attachment_id>/admin")] #[delete("/ciphers/<uuid>/attachment/<attachment_id>/admin")]
@ -1124,40 +1118,40 @@ async fn delete_attachment_admin(
uuid: String, uuid: String,
attachment_id: String, attachment_id: String,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt).await _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &mut conn, &nt).await
} }
#[post("/ciphers/<uuid>/delete")] #[post("/ciphers/<uuid>/delete")]
async fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_cipher_post(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await _delete_cipher_by_uuid(&uuid, &headers, &mut conn, false, &nt).await
} }
#[post("/ciphers/<uuid>/delete-admin")] #[post("/ciphers/<uuid>/delete-admin")]
async fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_cipher_post_admin(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await _delete_cipher_by_uuid(&uuid, &headers, &mut conn, false, &nt).await
} }
#[put("/ciphers/<uuid>/delete")] #[put("/ciphers/<uuid>/delete")]
async fn delete_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_cipher_put(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt).await _delete_cipher_by_uuid(&uuid, &headers, &mut conn, true, &nt).await
} }
#[put("/ciphers/<uuid>/delete-admin")] #[put("/ciphers/<uuid>/delete-admin")]
async fn delete_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_cipher_put_admin(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt).await _delete_cipher_by_uuid(&uuid, &headers, &mut conn, true, &nt).await
} }
#[delete("/ciphers/<uuid>")] #[delete("/ciphers/<uuid>")]
async fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_cipher(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await _delete_cipher_by_uuid(&uuid, &headers, &mut conn, false, &nt).await
} }
#[delete("/ciphers/<uuid>/admin")] #[delete("/ciphers/<uuid>/admin")]
async fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_cipher_admin(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await _delete_cipher_by_uuid(&uuid, &headers, &mut conn, false, &nt).await
} }
#[delete("/ciphers", data = "<data>")] #[delete("/ciphers", data = "<data>")]
@ -1221,23 +1215,23 @@ async fn delete_cipher_selected_put_admin(
} }
#[put("/ciphers/<uuid>/restore")] #[put("/ciphers/<uuid>/restore")]
async fn restore_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn restore_cipher_put(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
_restore_cipher_by_uuid(&uuid, &headers, &conn, &nt).await _restore_cipher_by_uuid(&uuid, &headers, &mut conn, &nt).await
} }
#[put("/ciphers/<uuid>/restore-admin")] #[put("/ciphers/<uuid>/restore-admin")]
async fn restore_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn restore_cipher_put_admin(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
_restore_cipher_by_uuid(&uuid, &headers, &conn, &nt).await _restore_cipher_by_uuid(&uuid, &headers, &mut conn, &nt).await
} }
#[put("/ciphers/restore", data = "<data>")] #[put("/ciphers/restore", data = "<data>")]
async fn restore_cipher_selected( async fn restore_cipher_selected(
data: JsonUpcase<Value>, data: JsonUpcase<Value>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
_restore_multiple_ciphers(data, &headers, &conn, &nt).await _restore_multiple_ciphers(data, &headers, &mut conn, &nt).await
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -1251,14 +1245,14 @@ struct MoveCipherData {
async fn move_cipher_selected( async fn move_cipher_selected(
data: JsonUpcase<MoveCipherData>, data: JsonUpcase<MoveCipherData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let data = data.into_inner().data; let data = data.into_inner().data;
let user_uuid = headers.user.uuid; let user_uuid = headers.user.uuid;
if let Some(ref folder_id) = data.FolderId { if let Some(ref folder_id) = data.FolderId {
match Folder::find_by_uuid(folder_id, &conn).await { match Folder::find_by_uuid(folder_id, &mut conn).await {
Some(folder) => { Some(folder) => {
if folder.user_uuid != user_uuid { if folder.user_uuid != user_uuid {
err!("Folder is not owned by user") err!("Folder is not owned by user")
@ -1269,17 +1263,17 @@ async fn move_cipher_selected(
} }
for uuid in data.Ids { for uuid in data.Ids {
let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_accessible_to_user(&user_uuid, &conn).await { if !cipher.is_accessible_to_user(&user_uuid, &mut conn).await {
err!("Cipher is not accessible by user") err!("Cipher is not accessible by user")
} }
// Move cipher // Move cipher
cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &conn).await?; cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &mut conn).await?;
nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &[user_uuid.clone()]).await; nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &[user_uuid.clone()]).await;
} }
@ -1308,7 +1302,7 @@ async fn delete_all(
organization: Option<OrganizationId>, organization: Option<OrganizationId>,
data: JsonUpcase<PasswordData>, data: JsonUpcase<PasswordData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
@ -1323,11 +1317,11 @@ async fn delete_all(
match organization { match organization {
Some(org_data) => { Some(org_data) => {
// Organization ID in query params, purging organization vault // Organization ID in query params, purging organization vault
match UserOrganization::find_by_user_and_org(&user.uuid, &org_data.org_id, &conn).await { match UserOrganization::find_by_user_and_org(&user.uuid, &org_data.org_id, &mut conn).await {
None => err!("You don't have permission to purge the organization vault"), None => err!("You don't have permission to purge the organization vault"),
Some(user_org) => { Some(user_org) => {
if user_org.atype == UserOrgType::Owner { if user_org.atype == UserOrgType::Owner {
Cipher::delete_all_by_organization(&org_data.org_id, &conn).await?; Cipher::delete_all_by_organization(&org_data.org_id, &mut conn).await?;
nt.send_user_update(UpdateType::Vault, &user).await; nt.send_user_update(UpdateType::Vault, &user).await;
Ok(()) Ok(())
} else { } else {
@ -1339,16 +1333,16 @@ async fn delete_all(
None => { None => {
// No organization ID in query params, purging user vault // No organization ID in query params, purging user vault
// Delete ciphers and their attachments // Delete ciphers and their attachments
for cipher in Cipher::find_owned_by_user(&user.uuid, &conn).await { for cipher in Cipher::find_owned_by_user(&user.uuid, &mut conn).await {
cipher.delete(&conn).await?; cipher.delete(&mut conn).await?;
} }
// Delete folders // Delete folders
for f in Folder::find_by_user(&user.uuid, &conn).await { for f in Folder::find_by_user(&user.uuid, &mut conn).await {
f.delete(&conn).await?; f.delete(&mut conn).await?;
} }
user.update_revision(&conn).await?; user.update_revision(&mut conn).await?;
nt.send_user_update(UpdateType::Vault, &user).await; nt.send_user_update(UpdateType::Vault, &user).await;
Ok(()) Ok(())
} }
@ -1358,7 +1352,7 @@ async fn delete_all(
async fn _delete_cipher_by_uuid( async fn _delete_cipher_by_uuid(
uuid: &str, uuid: &str,
headers: &Headers, headers: &Headers,
conn: &DbConn, conn: &mut DbConn,
soft_delete: bool, soft_delete: bool,
nt: &Notify<'_>, nt: &Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
@ -1386,7 +1380,7 @@ async fn _delete_cipher_by_uuid(
async fn _delete_multiple_ciphers( async fn _delete_multiple_ciphers(
data: JsonUpcase<Value>, data: JsonUpcase<Value>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
soft_delete: bool, soft_delete: bool,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
@ -1401,7 +1395,7 @@ async fn _delete_multiple_ciphers(
}; };
for uuid in uuids { for uuid in uuids {
if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, soft_delete, &nt).await { if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &mut conn, soft_delete, &nt).await {
return error; return error;
}; };
} }
@ -1409,7 +1403,7 @@ async fn _delete_multiple_ciphers(
Ok(()) Ok(())
} }
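The delete and restore routes stay thin wrappers around shared helpers that borrow the connection as &mut DbConn, so each route only decides the soft-delete flag. A condensed sketch of that layering with stand-in types:

// Stand-ins only: one helper, many thin route-like wrappers.
struct DbConn;

fn delete_cipher_by_uuid(uuid: &str, conn: &mut DbConn, soft_delete: bool) -> Result<(), String> {
    // would look up the cipher and either soft-delete or purge it
    let _ = (uuid, conn, soft_delete);
    Ok(())
}

fn delete_route(mut conn: DbConn, uuid: &str) -> Result<(), String> {
    delete_cipher_by_uuid(uuid, &mut conn, false)
}

fn soft_delete_route(mut conn: DbConn, uuid: &str) -> Result<(), String> {
    delete_cipher_by_uuid(uuid, &mut conn, true)
}

fn main() {
    delete_route(DbConn, "uuid-1").unwrap();
    soft_delete_route(DbConn, "uuid-2").unwrap();
}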
async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify<'_>) -> JsonResult { async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &mut DbConn, nt: &Notify<'_>) -> JsonResult {
let mut cipher = match Cipher::find_by_uuid(uuid, conn).await { let mut cipher = match Cipher::find_by_uuid(uuid, conn).await {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
@ -1429,7 +1423,7 @@ async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, n
async fn _restore_multiple_ciphers( async fn _restore_multiple_ciphers(
data: JsonUpcase<Value>, data: JsonUpcase<Value>,
headers: &Headers, headers: &Headers,
conn: &DbConn, conn: &mut DbConn,
nt: &Notify<'_>, nt: &Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: Value = data.into_inner().data; let data: Value = data.into_inner().data;
@ -1461,7 +1455,7 @@ async fn _delete_cipher_attachment_by_id(
uuid: &str, uuid: &str,
attachment_id: &str, attachment_id: &str,
headers: &Headers, headers: &Headers,
conn: &DbConn, conn: &mut DbConn,
nt: &Notify<'_>, nt: &Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
let attachment = match Attachment::find_by_id(attachment_id, conn).await { let attachment = match Attachment::find_by_id(attachment_id, conn).await {
@ -1509,9 +1503,9 @@ pub enum CipherSyncType {
} }
impl CipherSyncData { impl CipherSyncData {
pub async fn new(user_uuid: &str, ciphers: &Vec<Cipher>, sync_type: CipherSyncType, conn: &DbConn) -> Self { pub async fn new(user_uuid: &str, ciphers: &[Cipher], sync_type: CipherSyncType, conn: &mut DbConn) -> Self {
// Generate a list of Cipher UUID's to be used during a query filter with an eq_any. // Generate a list of Cipher UUID's to be used during a query filter with an eq_any.
let cipher_uuids = stream::iter(ciphers).map(|c| c.uuid.clone()).collect::<Vec<String>>().await; let cipher_uuids = ciphers.iter().map(|c| c.uuid.clone()).collect();
let mut cipher_folders: HashMap<String, String> = HashMap::new(); let mut cipher_folders: HashMap<String, String> = HashMap::new();
let mut cipher_favorites: HashSet<String> = HashSet::new(); let mut cipher_favorites: HashSet<String> = HashSet::new();
@ -1519,11 +1513,10 @@ impl CipherSyncData {
// User Sync supports Folders and Favorits // User Sync supports Folders and Favorits
CipherSyncType::User => { CipherSyncType::User => {
// Generate a HashMap with the Cipher UUID as key and the Folder UUID as value // Generate a HashMap with the Cipher UUID as key and the Folder UUID as value
-                cipher_folders = stream::iter(FolderCipher::find_by_user(user_uuid, conn).await).collect().await;
+                cipher_folders = FolderCipher::find_by_user(user_uuid, conn).await.into_iter().collect();
                // Generate a HashSet of all the Cipher UUID's which are marked as favorite
-                cipher_favorites =
-                    stream::iter(Favorite::get_all_cipher_uuid_by_user(user_uuid, conn).await).collect().await;
+                cipher_favorites = Favorite::get_all_cipher_uuid_by_user(user_uuid, conn).await.into_iter().collect();
} }
// Organization Sync does not support Folders and Favorits. // Organization Sync does not support Folders and Favorits.
// If these are set, it will cause issues in the web-vault. // If these are set, it will cause issues in the web-vault.
@ -1538,33 +1531,34 @@ impl CipherSyncData {
// Generate a HashMap with the Cipher UUID as key and one or more Collection UUID's // Generate a HashMap with the Cipher UUID as key and one or more Collection UUID's
let mut cipher_collections: HashMap<String, Vec<String>> = HashMap::new(); let mut cipher_collections: HashMap<String, Vec<String>> = HashMap::new();
for (cipher, collection) in Cipher::get_collections_with_cipher_by_user(user_uuid, conn).await { for (cipher, collection) in Cipher::get_collections_with_cipher_by_user(user_uuid.to_string(), conn).await {
cipher_collections.entry(cipher).or_default().push(collection); cipher_collections.entry(cipher).or_default().push(collection);
} }
        // Generate a HashMap with the Organization UUID as key and the UserOrganization record
-        let user_organizations: HashMap<String, UserOrganization> =
-            stream::iter(UserOrganization::find_by_user(user_uuid, conn).await)
-                .map(|uo| (uo.org_uuid.clone(), uo))
-                .collect()
-                .await;
+        let user_organizations: HashMap<String, UserOrganization> = UserOrganization::find_by_user(user_uuid, conn)
+            .await
+            .into_iter()
+            .map(|uo| (uo.org_uuid.clone(), uo))
+            .collect();

        // Generate a HashMap with the User_Collections UUID as key and the CollectionUser record
-        let user_collections: HashMap<String, CollectionUser> =
-            stream::iter(CollectionUser::find_by_user(user_uuid, conn).await)
-                .map(|uc| (uc.collection_uuid.clone(), uc))
-                .collect()
-                .await;
+        let user_collections: HashMap<String, CollectionUser> = CollectionUser::find_by_user(user_uuid, conn)
+            .await
+            .into_iter()
+            .map(|uc| (uc.collection_uuid.clone(), uc))
+            .collect();

        // Generate a HashMap with the collections_uuid as key and the CollectionGroup record
-        let user_collections_groups = stream::iter(CollectionGroup::find_by_user(user_uuid, conn).await)
-            .map(|collection_group| (collection_group.collections_uuid.clone(), collection_group))
-            .collect()
-            .await;
+        let user_collections_groups = CollectionGroup::find_by_user(user_uuid, conn)
+            .await
+            .into_iter()
+            .map(|collection_group| (collection_group.collections_uuid.clone(), collection_group))
+            .collect();

        // Get all organizations that the user has full access to via group assignement
        let user_group_full_access_for_organizations =
-            stream::iter(Group::gather_user_organizations_full_access(user_uuid, conn).await).collect().await;
+            Group::gather_user_organizations_full_access(user_uuid, conn).await.into_iter().collect();
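Once the rows are back from the database, the lookup maps built in CipherSyncData::new are plain iterator pipelines; the async stream adaptors added nothing here. A self-contained sketch with a stand-in row type:

use std::collections::HashMap;

// Stand-in for a row returned by a diesel query.
struct UserOrganization {
    org_uuid: String,
    atype: i32,
}

fn main() {
    let rows = vec![
        UserOrganization { org_uuid: "org-a".into(), atype: 2 },
        UserOrganization { org_uuid: "org-b".into(), atype: 0 },
    ];
    // Key each row by its organization UUID, mirroring the map-by-uuid shape above.
    let by_org: HashMap<String, UserOrganization> =
        rows.into_iter().map(|uo| (uo.org_uuid.clone(), uo)).collect();
    assert_eq!(by_org["org-a"].atype, 2);
    assert!(by_org.contains_key("org-b"));
}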
Self { Self {
cipher_attachments, cipher_attachments,

View File

@ -2,7 +2,6 @@ use chrono::{Duration, Utc};
use rocket::serde::json::Json; use rocket::serde::json::Json;
use rocket::Route; use rocket::Route;
use serde_json::Value; use serde_json::Value;
use std::borrow::Borrow;
use crate::{ use crate::{
api::{ api::{
@ -14,8 +13,6 @@ use crate::{
mail, CONFIG, mail, CONFIG,
}; };
use futures::{stream, stream::StreamExt};
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
routes![ routes![
get_contacts, get_contacts,
@ -41,17 +38,13 @@ pub fn routes() -> Vec<Route> {
// region get // region get
#[get("/emergency-access/trusted")] #[get("/emergency-access/trusted")]
async fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult { async fn get_contacts(headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
-    let emergency_access_list_json =
-        stream::iter(EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await)
-            .then(|e| async {
-                let e = e; // Move out this single variable
-                e.to_json_grantee_details(&conn).await
-            })
-            .collect::<Vec<Value>>()
-            .await;
+    let mut emergency_access_list_json = Vec::new();
+    for e in EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await {
+        emergency_access_list_json.push(e.to_json_grantee_details(&mut conn).await);
+    }
Ok(Json(json!({ Ok(Json(json!({
"Data": emergency_access_list_json, "Data": emergency_access_list_json,
@ -61,17 +54,13 @@ async fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult {
} }
#[get("/emergency-access/granted")] #[get("/emergency-access/granted")]
-async fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult {
+async fn get_grantees(headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
-    let emergency_access_list_json =
-        stream::iter(EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &conn).await)
-            .then(|e| async {
-                let e = e; // Move out this single variable
-                e.to_json_grantor_details(&conn).await
-            })
-            .collect::<Vec<Value>>()
-            .await;
+    let mut emergency_access_list_json = Vec::new();
+    for e in EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &mut conn).await {
+        emergency_access_list_json.push(e.to_json_grantor_details(&mut conn).await);
+    }
Ok(Json(json!({ Ok(Json(json!({
"Data": emergency_access_list_json, "Data": emergency_access_list_json,
@ -81,11 +70,11 @@ async fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult {
} }
#[get("/emergency-access/<emer_id>")] #[get("/emergency-access/<emer_id>")]
async fn get_emergency_access(emer_id: String, conn: DbConn) -> JsonResult { async fn get_emergency_access(emer_id: String, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&conn).await)), Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&mut conn).await)),
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
} }
} }
@ -115,13 +104,13 @@ async fn put_emergency_access(
async fn post_emergency_access( async fn post_emergency_access(
emer_id: String, emer_id: String,
data: JsonUpcase<EmergencyAccessUpdateData>, data: JsonUpcase<EmergencyAccessUpdateData>,
conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let data: EmergencyAccessUpdateData = data.into_inner().data; let data: EmergencyAccessUpdateData = data.into_inner().data;
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emergency_access) => emergency_access, Some(emergency_access) => emergency_access,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -135,7 +124,7 @@ async fn post_emergency_access(
emergency_access.wait_time_days = data.WaitTimeDays; emergency_access.wait_time_days = data.WaitTimeDays;
emergency_access.key_encrypted = data.KeyEncrypted; emergency_access.key_encrypted = data.KeyEncrypted;
emergency_access.save(&conn).await?; emergency_access.save(&mut conn).await?;
Ok(Json(emergency_access.to_json())) Ok(Json(emergency_access.to_json()))
} }
@ -144,12 +133,12 @@ async fn post_emergency_access(
// region delete // region delete
#[delete("/emergency-access/<emer_id>")] #[delete("/emergency-access/<emer_id>")]
async fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { async fn delete_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let grantor_user = headers.user; let grantor_user = headers.user;
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => { Some(emer) => {
if emer.grantor_uuid != grantor_user.uuid && emer.grantee_uuid != Some(grantor_user.uuid) { if emer.grantor_uuid != grantor_user.uuid && emer.grantee_uuid != Some(grantor_user.uuid) {
err!("Emergency access not valid.") err!("Emergency access not valid.")
@ -158,7 +147,7 @@ async fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn
} }
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
emergency_access.delete(&conn).await?; emergency_access.delete(&mut conn).await?;
Ok(()) Ok(())
} }
@ -180,7 +169,7 @@ struct EmergencyAccessInviteData {
} }
#[post("/emergency-access/invite", data = "<data>")] #[post("/emergency-access/invite", data = "<data>")]
async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let data: EmergencyAccessInviteData = data.into_inner().data; let data: EmergencyAccessInviteData = data.into_inner().data;
@ -201,7 +190,7 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
err!("You can not set yourself as an emergency contact.") err!("You can not set yourself as an emergency contact.")
} }
let grantee_user = match User::find_by_mail(&email, &conn).await { let grantee_user = match User::find_by_mail(&email, &mut conn).await {
None => { None => {
if !CONFIG.invitations_allowed() { if !CONFIG.invitations_allowed() {
err!(format!("Grantee user does not exist: {}", email)) err!(format!("Grantee user does not exist: {}", email))
@ -213,11 +202,11 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
if !CONFIG.mail_enabled() { if !CONFIG.mail_enabled() {
let invitation = Invitation::new(email.clone()); let invitation = Invitation::new(email.clone());
invitation.save(&conn).await?; invitation.save(&mut conn).await?;
} }
let mut user = User::new(email.clone()); let mut user = User::new(email.clone());
user.save(&conn).await?; user.save(&mut conn).await?;
user user
} }
Some(user) => user, Some(user) => user,
@ -227,7 +216,7 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
&grantor_user.uuid, &grantor_user.uuid,
&grantee_user.uuid, &grantee_user.uuid,
&grantee_user.email, &grantee_user.email,
&conn, &mut conn,
) )
.await .await
.is_some() .is_some()
@ -242,7 +231,7 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
new_type, new_type,
wait_time_days, wait_time_days,
); );
new_emergency_access.save(&conn).await?; new_emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
mail::send_emergency_access_invite( mail::send_emergency_access_invite(
@ -255,9 +244,9 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
.await?; .await?;
} else { } else {
// Automatically mark user as accepted if no email invites // Automatically mark user as accepted if no email invites
match User::find_by_mail(&email, &conn).await { match User::find_by_mail(&email, &mut conn).await {
Some(user) => { Some(user) => {
match accept_invite_process(user.uuid, new_emergency_access.uuid, Some(email), conn.borrow()).await { match accept_invite_process(user.uuid, new_emergency_access.uuid, Some(email), &mut conn).await {
Ok(v) => v, Ok(v) => v,
Err(e) => err!(e.to_string()), Err(e) => err!(e.to_string()),
} }
@ -270,10 +259,10 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
} }
#[post("/emergency-access/<emer_id>/reinvite")] #[post("/emergency-access/<emer_id>/reinvite")]
async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { async fn resend_invite(emer_id: String, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -291,7 +280,7 @@ async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> Empty
None => err!("Email not valid."), None => err!("Email not valid."),
}; };
let grantee_user = match User::find_by_mail(&email, &conn).await { let grantee_user = match User::find_by_mail(&email, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantee user not found."), None => err!("Grantee user not found."),
}; };
@ -308,15 +297,13 @@ async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> Empty
) )
.await?; .await?;
} else { } else {
if Invitation::find_by_mail(&email, &conn).await.is_none() { if Invitation::find_by_mail(&email, &mut conn).await.is_none() {
let invitation = Invitation::new(email); let invitation = Invitation::new(email);
invitation.save(&conn).await?; invitation.save(&mut conn).await?;
} }
// Automatically mark user as accepted if no email invites // Automatically mark user as accepted if no email invites
-        match accept_invite_process(grantee_user.uuid, emergency_access.uuid, emergency_access.email, conn.borrow())
-            .await
-        {
+        match accept_invite_process(grantee_user.uuid, emergency_access.uuid, emergency_access.email, &mut conn).await {
Ok(v) => v, Ok(v) => v,
Err(e) => err!(e.to_string()), Err(e) => err!(e.to_string()),
} }
@ -332,28 +319,28 @@ struct AcceptData {
} }
#[post("/emergency-access/<emer_id>/accept", data = "<data>")] #[post("/emergency-access/<emer_id>/accept", data = "<data>")]
async fn accept_invite(emer_id: String, data: JsonUpcase<AcceptData>, conn: DbConn) -> EmptyResult { async fn accept_invite(emer_id: String, data: JsonUpcase<AcceptData>, mut conn: DbConn) -> EmptyResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let data: AcceptData = data.into_inner().data; let data: AcceptData = data.into_inner().data;
let token = &data.Token; let token = &data.Token;
let claims = decode_emergency_access_invite(token)?; let claims = decode_emergency_access_invite(token)?;
let grantee_user = match User::find_by_mail(&claims.email, &conn).await { let grantee_user = match User::find_by_mail(&claims.email, &mut conn).await {
Some(user) => { Some(user) => {
Invitation::take(&claims.email, &conn).await; Invitation::take(&claims.email, &mut conn).await;
user user
} }
None => err!("Invited user not found"), None => err!("Invited user not found"),
}; };
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
// get grantor user to send Accepted email // get grantor user to send Accepted email
let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
@ -362,7 +349,9 @@ async fn accept_invite(emer_id: String, data: JsonUpcase<AcceptData>, conn: DbCo
&& (claims.grantor_name.is_some() && grantor_user.name == claims.grantor_name.unwrap()) && (claims.grantor_name.is_some() && grantor_user.name == claims.grantor_name.unwrap())
&& (claims.grantor_email.is_some() && grantor_user.email == claims.grantor_email.unwrap()) && (claims.grantor_email.is_some() && grantor_user.email == claims.grantor_email.unwrap())
{ {
-        match accept_invite_process(grantee_user.uuid.clone(), emer_id, Some(grantee_user.email.clone()), &conn).await {
+        match accept_invite_process(grantee_user.uuid.clone(), emer_id, Some(grantee_user.email.clone()), &mut conn)
+            .await
+        {
Ok(v) => v, Ok(v) => v,
Err(e) => err!(e.to_string()), Err(e) => err!(e.to_string()),
} }
@ -381,7 +370,7 @@ async fn accept_invite_process(
grantee_uuid: String, grantee_uuid: String,
emer_id: String, emer_id: String,
email: Option<String>, email: Option<String>,
conn: &DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, conn).await {
Some(emer) => emer, Some(emer) => emer,
@ -414,7 +403,7 @@ async fn confirm_emergency_access(
emer_id: String, emer_id: String,
data: JsonUpcase<ConfirmData>, data: JsonUpcase<ConfirmData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
@ -422,7 +411,7 @@ async fn confirm_emergency_access(
let data: ConfirmData = data.into_inner().data; let data: ConfirmData = data.into_inner().data;
let key = data.Key; let key = data.Key;
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -433,13 +422,13 @@ async fn confirm_emergency_access(
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &conn).await { let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await { let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantee user not found."), None => err!("Grantee user not found."),
}; };
@ -448,7 +437,7 @@ async fn confirm_emergency_access(
emergency_access.key_encrypted = Some(key); emergency_access.key_encrypted = Some(key);
emergency_access.email = None; emergency_access.email = None;
emergency_access.save(&conn).await?; emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name).await?; mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name).await?;
@ -464,11 +453,11 @@ async fn confirm_emergency_access(
// region access emergency access // region access emergency access
#[post("/emergency-access/<emer_id>/initiate")] #[post("/emergency-access/<emer_id>/initiate")]
async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn initiate_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let initiating_user = headers.user; let initiating_user = headers.user;
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -479,7 +468,7 @@ async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbCo
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
@ -489,7 +478,7 @@ async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbCo
emergency_access.updated_at = now; emergency_access.updated_at = now;
emergency_access.recovery_initiated_at = Some(now); emergency_access.recovery_initiated_at = Some(now);
emergency_access.last_notification_at = Some(now); emergency_access.last_notification_at = Some(now);
emergency_access.save(&conn).await?; emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
mail::send_emergency_access_recovery_initiated( mail::send_emergency_access_recovery_initiated(
@ -504,11 +493,11 @@ async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbCo
} }
#[post("/emergency-access/<emer_id>/approve")] #[post("/emergency-access/<emer_id>/approve")]
async fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn approve_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let approving_user = headers.user; let approving_user = headers.user;
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -519,19 +508,19 @@ async fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbCon
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&approving_user.uuid, &conn).await { let grantor_user = match User::find_by_uuid(&approving_user.uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await { let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantee user not found."), None => err!("Grantee user not found."),
}; };
emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32; emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32;
emergency_access.save(&conn).await?; emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name).await?; mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name).await?;
@ -543,11 +532,11 @@ async fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbCon
} }
#[post("/emergency-access/<emer_id>/reject")] #[post("/emergency-access/<emer_id>/reject")]
async fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn reject_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let rejecting_user = headers.user; let rejecting_user = headers.user;
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -559,19 +548,19 @@ async fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&rejecting_user.uuid, &conn).await { let grantor_user = match User::find_by_uuid(&rejecting_user.uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await { let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantee user not found."), None => err!("Grantee user not found."),
}; };
emergency_access.status = EmergencyAccessStatus::Confirmed as i32; emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
emergency_access.save(&conn).await?; emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
mail::send_emergency_access_recovery_rejected(&grantee_user.email, &grantor_user.name).await?; mail::send_emergency_access_recovery_rejected(&grantee_user.email, &grantor_user.name).await?;
@ -587,12 +576,12 @@ async fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn
// region action // region action
#[post("/emergency-access/<emer_id>/view")] #[post("/emergency-access/<emer_id>/view")]
async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn view_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let requesting_user = headers.user; let requesting_user = headers.user;
let host = headers.host; let host = headers.host;
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -601,17 +590,14 @@ async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn)
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
-    let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await;
-    let cipher_sync_data =
-        CipherSyncData::new(&emergency_access.grantor_uuid, &ciphers, CipherSyncType::User, &conn).await;
-    let ciphers_json = stream::iter(ciphers)
-        .then(|c| async {
-            let c = c; // Move out this single variable
-            c.to_json(&host, &emergency_access.grantor_uuid, Some(&cipher_sync_data), &conn).await
-        })
-        .collect::<Vec<Value>>()
-        .await;
+    let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &mut conn).await;
+    let cipher_sync_data =
+        CipherSyncData::new(&emergency_access.grantor_uuid, &ciphers, CipherSyncType::User, &mut conn).await;
+    let mut ciphers_json = Vec::new();
+    for c in ciphers {
+        ciphers_json.push(c.to_json(&host, &emergency_access.grantor_uuid, Some(&cipher_sync_data), &mut conn).await);
+    }
Ok(Json(json!({ Ok(Json(json!({
"Ciphers": ciphers_json, "Ciphers": ciphers_json,
@ -621,11 +607,11 @@ async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn)
} }
#[post("/emergency-access/<emer_id>/takeover")] #[post("/emergency-access/<emer_id>/takeover")]
async fn takeover_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn takeover_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
let requesting_user = headers.user; let requesting_user = headers.user;
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -634,7 +620,7 @@ async fn takeover_emergency_access(emer_id: String, headers: Headers, conn: DbCo
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
@ -659,7 +645,7 @@ async fn password_emergency_access(
emer_id: String, emer_id: String,
data: JsonUpcase<EmergencyAccessPasswordData>, data: JsonUpcase<EmergencyAccessPasswordData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
) -> EmptyResult { ) -> EmptyResult {
check_emergency_access_allowed()?; check_emergency_access_allowed()?;
@ -668,7 +654,7 @@ async fn password_emergency_access(
let key = data.Key; let key = data.Key;
let requesting_user = headers.user; let requesting_user = headers.user;
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -677,7 +663,7 @@ async fn password_emergency_access(
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
@ -685,15 +671,15 @@ async fn password_emergency_access(
// change grantor_user password // change grantor_user password
grantor_user.set_password(new_master_password_hash, None); grantor_user.set_password(new_master_password_hash, None);
grantor_user.akey = key; grantor_user.akey = key;
grantor_user.save(&conn).await?; grantor_user.save(&mut conn).await?;
// Disable TwoFactor providers since they will otherwise block logins // Disable TwoFactor providers since they will otherwise block logins
TwoFactor::delete_all_by_user(&grantor_user.uuid, &conn).await?; TwoFactor::delete_all_by_user(&grantor_user.uuid, &mut conn).await?;
// Remove grantor from all organisations unless Owner // Remove grantor from all organisations unless Owner
for user_org in UserOrganization::find_any_state_by_user(&grantor_user.uuid, &conn).await { for user_org in UserOrganization::find_any_state_by_user(&grantor_user.uuid, &mut conn).await {
if user_org.atype != UserOrgType::Owner as i32 { if user_org.atype != UserOrgType::Owner as i32 {
user_org.delete(&conn).await?; user_org.delete(&mut conn).await?;
} }
} }
Ok(()) Ok(())
@ -702,9 +688,9 @@ async fn password_emergency_access(
// endregion // endregion
#[get("/emergency-access/<emer_id>/policies")] #[get("/emergency-access/<emer_id>/policies")]
async fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { async fn policies_emergency_access(emer_id: String, headers: Headers, mut conn: DbConn) -> JsonResult {
let requesting_user = headers.user; let requesting_user = headers.user;
let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &mut conn).await {
Some(emer) => emer, Some(emer) => emer,
None => err!("Emergency access not valid."), None => err!("Emergency access not valid."),
}; };
@ -713,12 +699,12 @@ async fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbCo
err!("Emergency access not valid.") err!("Emergency access not valid.")
} }
let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Grantor user not found."), None => err!("Grantor user not found."),
}; };
let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &conn); let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &mut conn);
let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect(); let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
@ -751,8 +737,8 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
return; return;
} }
if let Ok(conn) = pool.get().await { if let Ok(mut conn) = pool.get().await {
let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn).await; let emergency_access_list = EmergencyAccess::find_all_recoveries(&mut conn).await;
if emergency_access_list.is_empty() { if emergency_access_list.is_empty() {
debug!("No emergency request timeout to approve"); debug!("No emergency request timeout to approve");
@ -764,16 +750,16 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
>= emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days)) >= emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days))
{ {
emer.status = EmergencyAccessStatus::RecoveryApproved as i32; emer.status = EmergencyAccessStatus::RecoveryApproved as i32;
emer.save(&conn).await.expect("Cannot save emergency access on job"); emer.save(&mut conn).await.expect("Cannot save emergency access on job");
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
// get grantor user to send Accepted email // get grantor user to send Accepted email
let grantor_user = let grantor_user =
User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found."); User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found.");
// get grantee user to send Accepted email // get grantee user to send Accepted email
let grantee_user = let grantee_user =
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &conn) User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &mut conn)
.await .await
.expect("Grantee user not found."); .expect("Grantee user not found.");
@ -802,8 +788,8 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
return; return;
} }
if let Ok(conn) = pool.get().await { if let Ok(mut conn) = pool.get().await {
let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn).await; let emergency_access_list = EmergencyAccess::find_all_recoveries(&mut conn).await;
if emergency_access_list.is_empty() { if emergency_access_list.is_empty() {
debug!("No emergency request reminder notification to send"); debug!("No emergency request reminder notification to send");
@ -817,16 +803,16 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
|| (emer.last_notification_at.is_some() || (emer.last_notification_at.is_some()
&& Utc::now().naive_utc() >= emer.last_notification_at.unwrap() + Duration::days(1))) && Utc::now().naive_utc() >= emer.last_notification_at.unwrap() + Duration::days(1)))
{ {
emer.save(&conn).await.expect("Cannot save emergency access on job"); emer.save(&mut conn).await.expect("Cannot save emergency access on job");
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
// get grantor user to send Accepted email // get grantor user to send Accepted email
let grantor_user = let grantor_user =
User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found."); User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found.");
// get grantee user to send Accepted email // get grantee user to send Accepted email
let grantee_user = let grantee_user =
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &conn) User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &mut conn)
.await .await
.expect("Grantee user not found."); .expect("Grantee user not found.");


@ -12,8 +12,8 @@ pub fn routes() -> Vec<rocket::Route> {
} }
#[get("/folders")] #[get("/folders")]
async fn get_folders(headers: Headers, conn: DbConn) -> Json<Value> { async fn get_folders(headers: Headers, mut conn: DbConn) -> Json<Value> {
let folders = Folder::find_by_user(&headers.user.uuid, &conn).await; let folders = Folder::find_by_user(&headers.user.uuid, &mut conn).await;
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect(); let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
Json(json!({ Json(json!({
@ -24,8 +24,8 @@ async fn get_folders(headers: Headers, conn: DbConn) -> Json<Value> {
} }
#[get("/folders/<uuid>")] #[get("/folders/<uuid>")]
async fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { async fn get_folder(uuid: String, headers: Headers, mut conn: DbConn) -> JsonResult {
let folder = match Folder::find_by_uuid(&uuid, &conn).await { let folder = match Folder::find_by_uuid(&uuid, &mut conn).await {
Some(folder) => folder, Some(folder) => folder,
_ => err!("Invalid folder"), _ => err!("Invalid folder"),
}; };
@ -44,12 +44,12 @@ pub struct FolderData {
} }
#[post("/folders", data = "<data>")] #[post("/folders", data = "<data>")]
async fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
let data: FolderData = data.into_inner().data; let data: FolderData = data.into_inner().data;
let mut folder = Folder::new(headers.user.uuid, data.Name); let mut folder = Folder::new(headers.user.uuid, data.Name);
folder.save(&conn).await?; folder.save(&mut conn).await?;
nt.send_folder_update(UpdateType::FolderCreate, &folder).await; nt.send_folder_update(UpdateType::FolderCreate, &folder).await;
Ok(Json(folder.to_json())) Ok(Json(folder.to_json()))
@ -71,12 +71,12 @@ async fn put_folder(
uuid: String, uuid: String,
data: JsonUpcase<FolderData>, data: JsonUpcase<FolderData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
let data: FolderData = data.into_inner().data; let data: FolderData = data.into_inner().data;
let mut folder = match Folder::find_by_uuid(&uuid, &conn).await { let mut folder = match Folder::find_by_uuid(&uuid, &mut conn).await {
Some(folder) => folder, Some(folder) => folder,
_ => err!("Invalid folder"), _ => err!("Invalid folder"),
}; };
@ -87,7 +87,7 @@ async fn put_folder(
folder.name = data.Name; folder.name = data.Name;
folder.save(&conn).await?; folder.save(&mut conn).await?;
nt.send_folder_update(UpdateType::FolderUpdate, &folder).await; nt.send_folder_update(UpdateType::FolderUpdate, &folder).await;
Ok(Json(folder.to_json())) Ok(Json(folder.to_json()))
@ -99,8 +99,8 @@ async fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, nt: No
} }
#[delete("/folders/<uuid>")] #[delete("/folders/<uuid>")]
async fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_folder(uuid: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let folder = match Folder::find_by_uuid(&uuid, &conn).await { let folder = match Folder::find_by_uuid(&uuid, &mut conn).await {
Some(folder) => folder, Some(folder) => folder,
_ => err!("Invalid folder"), _ => err!("Invalid folder"),
}; };
@ -110,7 +110,7 @@ async fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify<
} }
// Delete the actual folder entry // Delete the actual folder entry
folder.delete(&conn).await?; folder.delete(&mut conn).await?;
nt.send_folder_update(UpdateType::FolderDelete, &folder).await; nt.send_folder_update(UpdateType::FolderDelete, &folder).await;
Ok(()) Ok(())


@ -128,7 +128,7 @@ struct EquivDomainData {
} }
#[post("/settings/domains", data = "<data>")] #[post("/settings/domains", data = "<data>")]
async fn post_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: DbConn) -> JsonResult { async fn post_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EquivDomainData = data.into_inner().data; let data: EquivDomainData = data.into_inner().data;
let excluded_globals = data.ExcludedGlobalEquivalentDomains.unwrap_or_default(); let excluded_globals = data.ExcludedGlobalEquivalentDomains.unwrap_or_default();
@ -140,7 +140,7 @@ async fn post_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, co
user.excluded_globals = to_string(&excluded_globals).unwrap_or_else(|_| "[]".to_string()); user.excluded_globals = to_string(&excluded_globals).unwrap_or_else(|_| "[]".to_string());
user.equivalent_domains = to_string(&equivalent_domains).unwrap_or_else(|_| "[]".to_string()); user.equivalent_domains = to_string(&equivalent_domains).unwrap_or_else(|_| "[]".to_string());
user.save(&conn).await?; user.save(&mut conn).await?;
Ok(Json(json!({}))) Ok(Json(json!({})))
} }
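post_eq_domains above persists both domain lists on the user row as JSON text, falling back to an empty array when serialization fails. The helper shape, sketched with a hypothetical name:

use serde::Serialize;

// Equivalent-domain lists are stored as JSON strings on the user record; on a
// serialization error the stored value degrades to an empty JSON array.
fn to_json_string_or_empty<T: Serialize>(value: &T) -> String {
    serde_json::to_string(value).unwrap_or_else(|_| "[]".to_string())
}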

File diff suppressed because it is too large.


@ -39,8 +39,8 @@ pub fn routes() -> Vec<rocket::Route> {
pub async fn purge_sends(pool: DbPool) { pub async fn purge_sends(pool: DbPool) {
debug!("Purging sends"); debug!("Purging sends");
if let Ok(conn) = pool.get().await { if let Ok(mut conn) = pool.get().await {
Send::purge(&conn).await; Send::purge(&mut conn).await;
} else { } else {
error!("Failed to get DB connection while purging sends") error!("Failed to get DB connection while purging sends")
} }
@ -74,7 +74,7 @@ struct SendData {
/// ///
/// There is also a Vaultwarden-specific `sends_allowed` config setting that /// There is also a Vaultwarden-specific `sends_allowed` config setting that
/// controls this policy globally. /// controls this policy globally.
async fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyResult { async fn enforce_disable_send_policy(headers: &Headers, conn: &mut DbConn) -> EmptyResult {
let user_uuid = &headers.user.uuid; let user_uuid = &headers.user.uuid;
if !CONFIG.sends_allowed() if !CONFIG.sends_allowed()
|| OrgPolicy::is_applicable_to_user(user_uuid, OrgPolicyType::DisableSend, None, conn).await || OrgPolicy::is_applicable_to_user(user_uuid, OrgPolicyType::DisableSend, None, conn).await
@ -90,7 +90,7 @@ async fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyR
/// but is allowed to remove this option from an existing Send. /// but is allowed to remove this option from an existing Send.
/// ///
/// Ref: https://bitwarden.com/help/article/policies/#send-options /// Ref: https://bitwarden.com/help/article/policies/#send-options
async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &DbConn) -> EmptyResult { async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &mut DbConn) -> EmptyResult {
let user_uuid = &headers.user.uuid; let user_uuid = &headers.user.uuid;
let hide_email = data.HideEmail.unwrap_or(false); let hide_email = data.HideEmail.unwrap_or(false);
if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await { if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await {
@ -142,8 +142,8 @@ fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> {
} }
#[get("/sends")] #[get("/sends")]
async fn get_sends(headers: Headers, conn: DbConn) -> Json<Value> { async fn get_sends(headers: Headers, mut conn: DbConn) -> Json<Value> {
let sends = Send::find_by_user(&headers.user.uuid, &conn); let sends = Send::find_by_user(&headers.user.uuid, &mut conn);
let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect(); let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect();
Json(json!({ Json(json!({
@ -154,8 +154,8 @@ async fn get_sends(headers: Headers, conn: DbConn) -> Json<Value> {
} }
#[get("/sends/<uuid>")] #[get("/sends/<uuid>")]
async fn get_send(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { async fn get_send(uuid: String, headers: Headers, mut conn: DbConn) -> JsonResult {
let send = match Send::find_by_uuid(&uuid, &conn).await { let send = match Send::find_by_uuid(&uuid, &mut conn).await {
Some(send) => send, Some(send) => send,
None => err!("Send not found"), None => err!("Send not found"),
}; };
@ -168,19 +168,19 @@ async fn get_send(uuid: String, headers: Headers, conn: DbConn) -> JsonResult {
} }
#[post("/sends", data = "<data>")] #[post("/sends", data = "<data>")]
async fn post_send(data: JsonUpcase<SendData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_send(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data: SendData = data.into_inner().data; let data: SendData = data.into_inner().data;
enforce_disable_hide_email_policy(&data, &headers, &conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
if data.Type == SendType::File as i32 { if data.Type == SendType::File as i32 {
err!("File sends should use /api/sends/file") err!("File sends should use /api/sends/file")
} }
let mut send = create_send(data, headers.user.uuid)?; let mut send = create_send(data, headers.user.uuid)?;
send.save(&conn).await?; send.save(&mut conn).await?;
nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&conn).await).await; nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&mut conn).await).await;
Ok(Json(send.to_json())) Ok(Json(send.to_json()))
} }
@ -200,8 +200,8 @@ struct UploadDataV2<'f> {
// This method still exists to support older clients, probably need to remove it sometime. // This method still exists to support older clients, probably need to remove it sometime.
// Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L164-L167 // Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L164-L167
#[post("/sends/file", format = "multipart/form-data", data = "<data>")] #[post("/sends/file", format = "multipart/form-data", data = "<data>")]
async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let UploadData { let UploadData {
model, model,
@ -209,12 +209,12 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbCo
} = data.into_inner(); } = data.into_inner();
let model = model.into_inner().data; let model = model.into_inner().data;
enforce_disable_hide_email_policy(&model, &headers, &conn).await?; enforce_disable_hide_email_policy(&model, &headers, &mut conn).await?;
let size_limit = match CONFIG.user_attachment_limit() { let size_limit = match CONFIG.user_attachment_limit() {
Some(0) => err!("File uploads are disabled"), Some(0) => err!("File uploads are disabled"),
Some(limit_kb) => { Some(limit_kb) => {
let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &conn).await; let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &mut conn).await;
if left <= 0 { if left <= 0 {
err!("Attachment storage limit reached! Delete some attachments to free up space") err!("Attachment storage limit reached! Delete some attachments to free up space")
} }
@ -264,16 +264,16 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbCo
send.data = serde_json::to_string(&data_value)?; send.data = serde_json::to_string(&data_value)?;
// Save the changes in the database // Save the changes in the database
send.save(&conn).await?; send.save(&mut conn).await?;
nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&conn).await).await; nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&mut conn).await).await;
Ok(Json(send.to_json())) Ok(Json(send.to_json()))
} }
// Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190 // Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190
#[post("/sends/file/v2", data = "<data>")] #[post("/sends/file/v2", data = "<data>")]
async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, conn: DbConn) -> JsonResult { async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult {
enforce_disable_send_policy(&headers, &conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data = data.into_inner().data; let data = data.into_inner().data;
@ -281,7 +281,7 @@ async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, conn: D
err!("Send content is not a file"); err!("Send content is not a file");
} }
enforce_disable_hide_email_policy(&data, &headers, &conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
let file_length = match &data.FileLength { let file_length = match &data.FileLength {
Some(m) => Some(m.into_i32()?), Some(m) => Some(m.into_i32()?),
@ -291,7 +291,7 @@ async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, conn: D
let size_limit = match CONFIG.user_attachment_limit() { let size_limit = match CONFIG.user_attachment_limit() {
Some(0) => err!("File uploads are disabled"), Some(0) => err!("File uploads are disabled"),
Some(limit_kb) => { Some(limit_kb) => {
let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &conn).await; let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &mut conn).await;
if left <= 0 { if left <= 0 {
err!("Attachment storage limit reached! Delete some attachments to free up space") err!("Attachment storage limit reached! Delete some attachments to free up space")
} }
@ -315,7 +315,7 @@ async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, conn: D
o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(file_length.unwrap()))); o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(file_length.unwrap())));
} }
send.data = serde_json::to_string(&data_value)?; send.data = serde_json::to_string(&data_value)?;
send.save(&conn).await?; send.save(&mut conn).await?;
Ok(Json(json!({ Ok(Json(json!({
"fileUploadType": 0, // 0 == Direct | 1 == Azure "fileUploadType": 0, // 0 == Direct | 1 == Azure
@ -332,10 +332,10 @@ async fn post_send_file_v2_data(
file_id: String, file_id: String,
data: Form<UploadDataV2<'_>>, data: Form<UploadDataV2<'_>>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> EmptyResult { ) -> EmptyResult {
enforce_disable_send_policy(&headers, &conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let mut data = data.into_inner(); let mut data = data.into_inner();
@ -352,7 +352,7 @@ async fn post_send_file_v2_data(
err!("Error reading attachment data. Please try an other client."); err!("Error reading attachment data. Please try an other client.");
} }
if let Some(send) = Send::find_by_uuid(&send_uuid, &conn).await { if let Some(send) = Send::find_by_uuid(&send_uuid, &mut conn).await {
let folder_path = tokio::fs::canonicalize(&CONFIG.sends_folder()).await?.join(&send_uuid); let folder_path = tokio::fs::canonicalize(&CONFIG.sends_folder()).await?.join(&send_uuid);
let file_path = folder_path.join(&file_id); let file_path = folder_path.join(&file_id);
tokio::fs::create_dir_all(&folder_path).await?; tokio::fs::create_dir_all(&folder_path).await?;
@ -361,7 +361,7 @@ async fn post_send_file_v2_data(
data.data.move_copy_to(file_path).await? data.data.move_copy_to(file_path).await?
} }
nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&conn).await).await; nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&mut conn).await).await;
} else { } else {
err!("Send not found. Unable to save the file."); err!("Send not found. Unable to save the file.");
} }
@ -376,8 +376,13 @@ pub struct SendAccessData {
} }
#[post("/sends/access/<access_id>", data = "<data>")] #[post("/sends/access/<access_id>", data = "<data>")]
-async fn post_access(access_id: String, data: JsonUpcase<SendAccessData>, conn: DbConn, ip: ClientIp) -> JsonResult {
-    let mut send = match Send::find_by_access_id(&access_id, &conn).await {
+async fn post_access(
+    access_id: String,
+    data: JsonUpcase<SendAccessData>,
+    mut conn: DbConn,
+    ip: ClientIp,
+) -> JsonResult {
+    let mut send = match Send::find_by_access_id(&access_id, &mut conn).await {
Some(s) => s, Some(s) => s,
None => err_code!(SEND_INACCESSIBLE_MSG, 404), None => err_code!(SEND_INACCESSIBLE_MSG, 404),
}; };
@ -415,9 +420,9 @@ async fn post_access(access_id: String, data: JsonUpcase<SendAccessData>, conn:
send.access_count += 1; send.access_count += 1;
} }
send.save(&conn).await?; send.save(&mut conn).await?;
Ok(Json(send.to_json_access(&conn).await)) Ok(Json(send.to_json_access(&mut conn).await))
} }
#[post("/sends/<send_id>/access/file/<file_id>", data = "<data>")] #[post("/sends/<send_id>/access/file/<file_id>", data = "<data>")]
@ -426,9 +431,9 @@ async fn post_access_file(
file_id: String, file_id: String,
data: JsonUpcase<SendAccessData>, data: JsonUpcase<SendAccessData>,
host: Host, host: Host,
conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let mut send = match Send::find_by_uuid(&send_id, &conn).await { let mut send = match Send::find_by_uuid(&send_id, &mut conn).await {
Some(s) => s, Some(s) => s,
None => err_code!(SEND_INACCESSIBLE_MSG, 404), None => err_code!(SEND_INACCESSIBLE_MSG, 404),
}; };
@ -463,7 +468,7 @@ async fn post_access_file(
send.access_count += 1; send.access_count += 1;
send.save(&conn).await?; send.save(&mut conn).await?;
let token_claims = crate::auth::generate_send_claims(&send_id, &file_id); let token_claims = crate::auth::generate_send_claims(&send_id, &file_id);
let token = crate::auth::encode_jwt(&token_claims); let token = crate::auth::encode_jwt(&token_claims);
@ -489,15 +494,15 @@ async fn put_send(
id: String, id: String,
data: JsonUpcase<SendData>, data: JsonUpcase<SendData>,
headers: Headers, headers: Headers,
conn: DbConn, mut conn: DbConn,
nt: Notify<'_>, nt: Notify<'_>,
) -> JsonResult { ) -> JsonResult {
enforce_disable_send_policy(&headers, &conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let data: SendData = data.into_inner().data; let data: SendData = data.into_inner().data;
enforce_disable_hide_email_policy(&data, &headers, &conn).await?; enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
let mut send = match Send::find_by_uuid(&id, &conn).await { let mut send = match Send::find_by_uuid(&id, &mut conn).await {
Some(s) => s, Some(s) => s,
None => err!("Send not found"), None => err!("Send not found"),
}; };
@ -544,15 +549,15 @@ async fn put_send(
send.set_password(Some(&password)); send.set_password(Some(&password));
} }
send.save(&conn).await?; send.save(&mut conn).await?;
nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn).await).await; nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&mut conn).await).await;
Ok(Json(send.to_json())) Ok(Json(send.to_json()))
} }
#[delete("/sends/<id>")] #[delete("/sends/<id>")]
async fn delete_send(id: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { async fn delete_send(id: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let send = match Send::find_by_uuid(&id, &conn).await { let send = match Send::find_by_uuid(&id, &mut conn).await {
Some(s) => s, Some(s) => s,
None => err!("Send not found"), None => err!("Send not found"),
}; };
@ -561,17 +566,17 @@ async fn delete_send(id: String, headers: Headers, conn: DbConn, nt: Notify<'_>)
err!("Send is not owned by user") err!("Send is not owned by user")
} }
send.delete(&conn).await?; send.delete(&mut conn).await?;
nt.send_send_update(UpdateType::SyncSendDelete, &send, &send.update_users_revision(&conn).await).await; nt.send_send_update(UpdateType::SyncSendDelete, &send, &send.update_users_revision(&mut conn).await).await;
Ok(()) Ok(())
} }
#[put("/sends/<id>/remove-password")] #[put("/sends/<id>/remove-password")]
async fn put_remove_password(id: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { async fn put_remove_password(id: String, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &conn).await?; enforce_disable_send_policy(&headers, &mut conn).await?;
let mut send = match Send::find_by_uuid(&id, &conn).await { let mut send = match Send::find_by_uuid(&id, &mut conn).await {
Some(s) => s, Some(s) => s,
None => err!("Send not found"), None => err!("Send not found"),
}; };
@ -581,8 +586,8 @@ async fn put_remove_password(id: String, headers: Headers, conn: DbConn, nt: Not
} }
send.set_password(None); send.set_password(None);
send.save(&conn).await?; send.save(&mut conn).await?;
nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn).await).await; nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&mut conn).await).await;
Ok(Json(send.to_json())) Ok(Json(send.to_json()))
} }
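Both file-upload routes above budget the upload against the user's attachment quota before accepting it. The arithmetic, pulled out as a hedged sketch (the real code reads the limit from CONFIG.user_attachment_limit() and the usage from Attachment::size_by_user; the function name and error type below are illustrative only):

// Quota check used by post_send_file / post_send_file_v2: the configured limit is
// in kilobytes, current usage is in bytes, and a non-positive remainder rejects
// the upload.
fn check_upload_allowed(limit_kb: Option<i64>, used_bytes: i64) -> Result<(), &'static str> {
    match limit_kb {
        Some(0) => Err("File uploads are disabled"),
        Some(kb) => {
            if kb * 1024 - used_bytes <= 0 {
                Err("Attachment storage limit reached! Delete some attachments to free up space")
            } else {
                Ok(())
            }
        }
        None => Ok(()), // no limit configured
    }
}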


@ -21,7 +21,7 @@ pub fn routes() -> Vec<Route> {
} }
#[post("/two-factor/get-authenticator", data = "<data>")] #[post("/two-factor/get-authenticator", data = "<data>")]
async fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { async fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
let user = headers.user; let user = headers.user;
@ -30,7 +30,7 @@ async fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers
} }
let type_ = TwoFactorType::Authenticator as i32; let type_ = TwoFactorType::Authenticator as i32;
let twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await; let twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await;
let (enabled, key) = match twofactor { let (enabled, key) = match twofactor {
Some(tf) => (true, tf.data), Some(tf) => (true, tf.data),
@ -57,7 +57,7 @@ async fn activate_authenticator(
data: JsonUpcase<EnableAuthenticatorData>, data: JsonUpcase<EnableAuthenticatorData>,
headers: Headers, headers: Headers,
ip: ClientIp, ip: ClientIp,
conn: DbConn, mut conn: DbConn,
) -> JsonResult { ) -> JsonResult {
let data: EnableAuthenticatorData = data.into_inner().data; let data: EnableAuthenticatorData = data.into_inner().data;
let password_hash = data.MasterPasswordHash; let password_hash = data.MasterPasswordHash;
@ -81,9 +81,9 @@ async fn activate_authenticator(
} }
// Validate the token provided with the key, and save new twofactor // Validate the token provided with the key, and save new twofactor
validate_totp_code(&user.uuid, &token, &key.to_uppercase(), &ip, &conn).await?; validate_totp_code(&user.uuid, &token, &key.to_uppercase(), &ip, &mut conn).await?;
_generate_recover_code(&mut user, &conn).await; _generate_recover_code(&mut user, &mut conn).await;
Ok(Json(json!({ Ok(Json(json!({
"Enabled": true, "Enabled": true,
@ -107,7 +107,7 @@ pub async fn validate_totp_code_str(
totp_code: &str, totp_code: &str,
secret: &str, secret: &str,
ip: &ClientIp, ip: &ClientIp,
conn: &DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
if !totp_code.chars().all(char::is_numeric) { if !totp_code.chars().all(char::is_numeric) {
err!("TOTP code is not a number"); err!("TOTP code is not a number");
@ -121,7 +121,7 @@ pub async fn validate_totp_code(
totp_code: &str, totp_code: &str,
secret: &str, secret: &str,
ip: &ClientIp, ip: &ClientIp,
conn: &DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
use totp_lite::{totp_custom, Sha1}; use totp_lite::{totp_custom, Sha1};
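validate_totp_code above builds on totp_lite. Its core comparison can be sketched as below; this is only the kernel, assuming the caller has already base32-decoded the stored secret, and it leaves out the clock-drift window and last-used-step bookkeeping the real function adds around it:

use totp_lite::{totp_custom, Sha1};

// Derive the 6-digit code for the 30-second step containing `unix_time` and
// compare it with what the client submitted.
fn totp_matches(secret: &[u8], submitted_code: &str, unix_time: u64) -> bool {
    totp_custom::<Sha1>(30, 6, secret, unix_time) == submitted_code
}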


@ -89,14 +89,14 @@ impl DuoStatus {
const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>"; const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>";
#[post("/two-factor/get-duo", data = "<data>")] #[post("/two-factor/get-duo", data = "<data>")]
async fn get_duo(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { async fn get_duo(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
if !headers.user.check_valid_password(&data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.MasterPasswordHash) {
err!("Invalid password"); err!("Invalid password");
} }
let data = get_user_duo_data(&headers.user.uuid, &conn).await; let data = get_user_duo_data(&headers.user.uuid, &mut conn).await;
let (enabled, data) = match data { let (enabled, data) = match data {
DuoStatus::Global(_) => (true, Some(DuoData::secret())), DuoStatus::Global(_) => (true, Some(DuoData::secret())),
@ -152,7 +152,7 @@ fn check_duo_fields_custom(data: &EnableDuoData) -> bool {
} }
#[post("/two-factor/duo", data = "<data>")] #[post("/two-factor/duo", data = "<data>")]
async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult { async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EnableDuoData = data.into_inner().data; let data: EnableDuoData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -171,9 +171,9 @@ async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: D
let type_ = TwoFactorType::Duo; let type_ = TwoFactorType::Duo;
let twofactor = TwoFactor::new(user.uuid.clone(), type_, data_str); let twofactor = TwoFactor::new(user.uuid.clone(), type_, data_str);
twofactor.save(&conn).await?; twofactor.save(&mut conn).await?;
_generate_recover_code(&mut user, &conn).await; _generate_recover_code(&mut user, &mut conn).await;
Ok(Json(json!({ Ok(Json(json!({
"Enabled": true, "Enabled": true,
@ -223,7 +223,7 @@ const AUTH_PREFIX: &str = "AUTH";
const DUO_PREFIX: &str = "TX"; const DUO_PREFIX: &str = "TX";
const APP_PREFIX: &str = "APP"; const APP_PREFIX: &str = "APP";
async fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus { async fn get_user_duo_data(uuid: &str, conn: &mut DbConn) -> DuoStatus {
let type_ = TwoFactorType::Duo as i32; let type_ = TwoFactorType::Duo as i32;
// If the user doesn't have an entry, disabled // If the user doesn't have an entry, disabled
@ -247,7 +247,7 @@ async fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus {
} }
// let (ik, sk, ak, host) = get_duo_keys(); // let (ik, sk, ak, host) = get_duo_keys();
async fn get_duo_keys_email(email: &str, conn: &DbConn) -> ApiResult<(String, String, String, String)> { async fn get_duo_keys_email(email: &str, conn: &mut DbConn) -> ApiResult<(String, String, String, String)> {
let data = match User::find_by_mail(email, conn).await { let data = match User::find_by_mail(email, conn).await {
Some(u) => get_user_duo_data(&u.uuid, conn).await.data(), Some(u) => get_user_duo_data(&u.uuid, conn).await.data(),
_ => DuoData::global(), _ => DuoData::global(),
@ -257,7 +257,7 @@ async fn get_duo_keys_email(email: &str, conn: &DbConn) -> ApiResult<(String, St
Ok((data.ik, data.sk, CONFIG.get_duo_akey(), data.host)) Ok((data.ik, data.sk, CONFIG.get_duo_akey(), data.host))
} }
pub async fn generate_duo_signature(email: &str, conn: &DbConn) -> ApiResult<(String, String)> { pub async fn generate_duo_signature(email: &str, conn: &mut DbConn) -> ApiResult<(String, String)> {
let now = Utc::now().timestamp(); let now = Utc::now().timestamp();
let (ik, sk, ak, host) = get_duo_keys_email(email, conn).await?; let (ik, sk, ak, host) = get_duo_keys_email(email, conn).await?;
@ -275,7 +275,7 @@ fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64
format!("{}|{}", cookie, crypto::hmac_sign(key, &cookie)) format!("{}|{}", cookie, crypto::hmac_sign(key, &cookie))
} }
pub async fn validate_duo_login(email: &str, response: &str, conn: &DbConn) -> EmptyResult { pub async fn validate_duo_login(email: &str, response: &str, conn: &mut DbConn) -> EmptyResult {
// email is as entered by the user, so it needs to be normalized before // email is as entered by the user, so it needs to be normalized before
// comparison with auth_user below. // comparison with auth_user below.
let email = &email.to_lowercase(); let email = &email.to_lowercase();

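The Duo route handlers above only gain a `mut` on the parameter (`mut conn: DbConn`): a route owns its connection guard, so making the binding mutable is a purely local change and the callers (Rocket's generated glue) stay untouched. A plain-Rust sketch with a stand-in `DbConn`, just to show the binding rule:

    // Stand-in types; the real DbConn is Vaultwarden's request guard.
    struct DbConn;

    fn find_duo_data(_conn: &mut DbConn) -> Option<&'static str> {
        None // placeholder: pretend no Duo data is stored
    }

    // Before: `conn: DbConn` sufficed because helpers borrowed it immutably.
    // After: helpers want `&mut DbConn`, so the owned parameter is declared `mut`.
    fn get_duo(mut conn: DbConn) -> bool {
        find_duo_data(&mut conn).is_some()
    }

    fn main() {
        // The caller is unchanged; `mut` on a parameter never appears in the signature's type.
        assert!(!get_duo(DbConn));
    }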

@ -28,13 +28,13 @@ struct SendEmailLoginData {
/// User is trying to login and wants to use email 2FA. /// User is trying to login and wants to use email 2FA.
/// Does not require Bearer token /// Does not require Bearer token
#[post("/two-factor/send-email-login", data = "<data>")] // JsonResult #[post("/two-factor/send-email-login", data = "<data>")] // JsonResult
async fn send_email_login(data: JsonUpcase<SendEmailLoginData>, conn: DbConn) -> EmptyResult { async fn send_email_login(data: JsonUpcase<SendEmailLoginData>, mut conn: DbConn) -> EmptyResult {
let data: SendEmailLoginData = data.into_inner().data; let data: SendEmailLoginData = data.into_inner().data;
use crate::db::models::User; use crate::db::models::User;
// Get the user // Get the user
let user = match User::find_by_mail(&data.Email, &conn).await { let user = match User::find_by_mail(&data.Email, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Username or password is incorrect. Try again."), None => err!("Username or password is incorrect. Try again."),
}; };
@ -48,13 +48,13 @@ async fn send_email_login(data: JsonUpcase<SendEmailLoginData>, conn: DbConn) ->
err!("Email 2FA is disabled") err!("Email 2FA is disabled")
} }
send_token(&user.uuid, &conn).await?; send_token(&user.uuid, &mut conn).await?;
Ok(()) Ok(())
} }
/// Generate the token, save the data for later verification and send email to user /// Generate the token, save the data for later verification and send email to user
pub async fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn send_token(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
let type_ = TwoFactorType::Email as i32; let type_ = TwoFactorType::Email as i32;
let mut twofactor = let mut twofactor =
TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await.map_res("Two factor not found")?; TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await.map_res("Two factor not found")?;
@ -73,7 +73,7 @@ pub async fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
/// When user clicks on Manage email 2FA show the user the related information /// When user clicks on Manage email 2FA show the user the related information
#[post("/two-factor/get-email", data = "<data>")] #[post("/two-factor/get-email", data = "<data>")]
async fn get_email(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { async fn get_email(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
let user = headers.user; let user = headers.user;
@ -82,7 +82,7 @@ async fn get_email(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbCon
} }
let (enabled, mfa_email) = let (enabled, mfa_email) =
match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::Email as i32, &conn).await { match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::Email as i32, &mut conn).await {
Some(x) => { Some(x) => {
let twofactor_data = EmailTokenData::from_json(&x.data)?; let twofactor_data = EmailTokenData::from_json(&x.data)?;
(true, json!(twofactor_data.email)) (true, json!(twofactor_data.email))
@ -107,7 +107,7 @@ struct SendEmailData {
/// Send a verification email to the specified email address to check whether it exists/belongs to user. /// Send a verification email to the specified email address to check whether it exists/belongs to user.
#[post("/two-factor/send-email", data = "<data>")] #[post("/two-factor/send-email", data = "<data>")]
async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -> EmptyResult { async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: SendEmailData = data.into_inner().data; let data: SendEmailData = data.into_inner().data;
let user = headers.user; let user = headers.user;
@ -121,8 +121,8 @@ async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbC
let type_ = TwoFactorType::Email as i32; let type_ = TwoFactorType::Email as i32;
if let Some(tf) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await { if let Some(tf) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await {
tf.delete(&conn).await?; tf.delete(&mut conn).await?;
} }
let generated_token = crypto::generate_email_token(CONFIG.email_token_size()); let generated_token = crypto::generate_email_token(CONFIG.email_token_size());
@ -130,7 +130,7 @@ async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbC
// Uses EmailVerificationChallenge as type to show that it's not verified yet. // Uses EmailVerificationChallenge as type to show that it's not verified yet.
let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json()); let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json());
twofactor.save(&conn).await?; twofactor.save(&mut conn).await?;
mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?).await?; mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?).await?;
@ -147,7 +147,7 @@ struct EmailData {
/// Verify email belongs to user and can be used for 2FA email codes. /// Verify email belongs to user and can be used for 2FA email codes.
#[put("/two-factor/email", data = "<data>")] #[put("/two-factor/email", data = "<data>")]
async fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonResult { async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EmailData = data.into_inner().data; let data: EmailData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -157,7 +157,7 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> J
let type_ = TwoFactorType::EmailVerificationChallenge as i32; let type_ = TwoFactorType::EmailVerificationChallenge as i32;
let mut twofactor = let mut twofactor =
TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await.map_res("Two factor not found")?; TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await.map_res("Two factor not found")?;
let mut email_data = EmailTokenData::from_json(&twofactor.data)?; let mut email_data = EmailTokenData::from_json(&twofactor.data)?;
@ -173,9 +173,9 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> J
email_data.reset_token(); email_data.reset_token();
twofactor.atype = TwoFactorType::Email as i32; twofactor.atype = TwoFactorType::Email as i32;
twofactor.data = email_data.to_json(); twofactor.data = email_data.to_json();
twofactor.save(&conn).await?; twofactor.save(&mut conn).await?;
_generate_recover_code(&mut user, &conn).await; _generate_recover_code(&mut user, &mut conn).await;
Ok(Json(json!({ Ok(Json(json!({
"Email": email_data.email, "Email": email_data.email,
@ -185,7 +185,7 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> J
} }
/// Validate the email code when used as TwoFactor token mechanism /// Validate the email code when used as TwoFactor token mechanism
pub async fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { pub async fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &mut DbConn) -> EmptyResult {
let mut email_data = EmailTokenData::from_json(data)?; let mut email_data = EmailTokenData::from_json(data)?;
let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Email as i32, conn) let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Email as i32, conn)
.await .await


@ -38,8 +38,8 @@ pub fn routes() -> Vec<Route> {
} }
#[get("/two-factor")] #[get("/two-factor")]
async fn get_twofactor(headers: Headers, conn: DbConn) -> Json<Value> { async fn get_twofactor(headers: Headers, mut conn: DbConn) -> Json<Value> {
let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &conn).await; let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &mut conn).await;
let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_provider).collect(); let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_provider).collect();
Json(json!({ Json(json!({
@ -73,13 +73,13 @@ struct RecoverTwoFactor {
} }
#[post("/two-factor/recover", data = "<data>")] #[post("/two-factor/recover", data = "<data>")]
async fn recover(data: JsonUpcase<RecoverTwoFactor>, conn: DbConn) -> JsonResult { async fn recover(data: JsonUpcase<RecoverTwoFactor>, mut conn: DbConn) -> JsonResult {
let data: RecoverTwoFactor = data.into_inner().data; let data: RecoverTwoFactor = data.into_inner().data;
use crate::db::models::User; use crate::db::models::User;
// Get the user // Get the user
let mut user = match User::find_by_mail(&data.Email, &conn).await { let mut user = match User::find_by_mail(&data.Email, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Username or password is incorrect. Try again."), None => err!("Username or password is incorrect. Try again."),
}; };
@ -95,15 +95,15 @@ async fn recover(data: JsonUpcase<RecoverTwoFactor>, conn: DbConn) -> JsonResult
} }
// Remove all twofactors from the user // Remove all twofactors from the user
TwoFactor::delete_all_by_user(&user.uuid, &conn).await?; TwoFactor::delete_all_by_user(&user.uuid, &mut conn).await?;
// Remove the recovery code, not needed without twofactors // Remove the recovery code, not needed without twofactors
user.totp_recover = None; user.totp_recover = None;
user.save(&conn).await?; user.save(&mut conn).await?;
Ok(Json(json!({}))) Ok(Json(json!({})))
} }
async fn _generate_recover_code(user: &mut User, conn: &DbConn) { async fn _generate_recover_code(user: &mut User, conn: &mut DbConn) {
if user.totp_recover.is_none() { if user.totp_recover.is_none() {
let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20])); let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20]));
user.totp_recover = Some(totp_recover); user.totp_recover = Some(totp_recover);
@ -119,7 +119,7 @@ struct DisableTwoFactorData {
} }
#[post("/two-factor/disable", data = "<data>")] #[post("/two-factor/disable", data = "<data>")]
async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult { async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: DisableTwoFactorData = data.into_inner().data; let data: DisableTwoFactorData = data.into_inner().data;
let password_hash = data.MasterPasswordHash; let password_hash = data.MasterPasswordHash;
let user = headers.user; let user = headers.user;
@ -130,24 +130,24 @@ async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Head
let type_ = data.Type.into_i32()?; let type_ = data.Type.into_i32()?;
if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await { if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await {
twofactor.delete(&conn).await?; twofactor.delete(&mut conn).await?;
} }
let twofactor_disabled = TwoFactor::find_by_user(&user.uuid, &conn).await.is_empty(); let twofactor_disabled = TwoFactor::find_by_user(&user.uuid, &mut conn).await.is_empty();
if twofactor_disabled { if twofactor_disabled {
for user_org in for user_org in
UserOrganization::find_by_user_and_policy(&user.uuid, OrgPolicyType::TwoFactorAuthentication, &conn) UserOrganization::find_by_user_and_policy(&user.uuid, OrgPolicyType::TwoFactorAuthentication, &mut conn)
.await .await
.into_iter() .into_iter()
{ {
if user_org.atype < UserOrgType::Admin { if user_org.atype < UserOrgType::Admin {
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
let org = Organization::find_by_uuid(&user_org.org_uuid, &conn).await.unwrap(); let org = Organization::find_by_uuid(&user_org.org_uuid, &mut conn).await.unwrap();
mail::send_2fa_removed_from_org(&user.email, &org.name).await?; mail::send_2fa_removed_from_org(&user.email, &org.name).await?;
} }
user_org.delete(&conn).await?; user_org.delete(&mut conn).await?;
} }
} }
} }
@ -171,7 +171,7 @@ pub async fn send_incomplete_2fa_notifications(pool: DbPool) {
return; return;
} }
let conn = match pool.get().await { let mut conn = match pool.get().await {
Ok(conn) => conn, Ok(conn) => conn,
_ => { _ => {
error!("Failed to get DB connection in send_incomplete_2fa_notifications()"); error!("Failed to get DB connection in send_incomplete_2fa_notifications()");
@ -182,9 +182,9 @@ pub async fn send_incomplete_2fa_notifications(pool: DbPool) {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
let time_limit = Duration::minutes(CONFIG.incomplete_2fa_time_limit()); let time_limit = Duration::minutes(CONFIG.incomplete_2fa_time_limit());
let time_before = now - time_limit; let time_before = now - time_limit;
let incomplete_logins = TwoFactorIncomplete::find_logins_before(&time_before, &conn).await; let incomplete_logins = TwoFactorIncomplete::find_logins_before(&time_before, &mut conn).await;
for login in incomplete_logins { for login in incomplete_logins {
let user = User::find_by_uuid(&login.user_uuid, &conn).await.expect("User not found"); let user = User::find_by_uuid(&login.user_uuid, &mut conn).await.expect("User not found");
info!( info!(
"User {} did not complete a 2FA login within the configured time limit. IP: {}", "User {} did not complete a 2FA login within the configured time limit. IP: {}",
user.email, login.ip_address user.email, login.ip_address
@ -192,7 +192,7 @@ pub async fn send_incomplete_2fa_notifications(pool: DbPool) {
mail::send_incomplete_2fa_login(&user.email, &login.ip_address, &login.login_time, &login.device_name) mail::send_incomplete_2fa_login(&user.email, &login.ip_address, &login.login_time, &login.device_name)
.await .await
.expect("Error sending incomplete 2FA email"); .expect("Error sending incomplete 2FA email");
login.delete(&conn).await.expect("Error deleting incomplete 2FA record"); login.delete(&mut conn).await.expect("Error deleting incomplete 2FA record");
} }
} }

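The incomplete-2FA notification job above checks a connection out of the pool (`pool.get().await`) and now has to bind it as `mut conn` for the same reason. Vaultwarden wraps its pool in its own async type, but the requirement can be illustrated with diesel 2's bundled r2d2 support; a sketch assuming the `sqlite` and `r2d2` features, with none of the names taken from the diff:

    use diesel::prelude::*;
    use diesel::r2d2::{ConnectionManager, Pool};
    use diesel::sqlite::SqliteConnection;

    fn main() {
        let manager = ConnectionManager::<SqliteConnection>::new(":memory:");
        let pool = Pool::builder().max_size(1).build(manager).expect("failed to build pool");

        // The checked-out connection must be bound mutably before it can run queries.
        let mut conn = pool.get().expect("failed to check out a connection");
        diesel::sql_query("SELECT 1")
            .execute(&mut *conn) // explicit reborrow of the inner SqliteConnection
            .expect("query failed");
    }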

@ -102,7 +102,7 @@ impl WebauthnRegistration {
} }
#[post("/two-factor/get-webauthn", data = "<data>")] #[post("/two-factor/get-webauthn", data = "<data>")]
async fn get_webauthn(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { async fn get_webauthn(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
if !CONFIG.domain_set() { if !CONFIG.domain_set() {
err!("`DOMAIN` environment variable is not set. Webauthn disabled") err!("`DOMAIN` environment variable is not set. Webauthn disabled")
} }
@ -111,7 +111,7 @@ async fn get_webauthn(data: JsonUpcase<PasswordData>, headers: Headers, conn: Db
err!("Invalid password"); err!("Invalid password");
} }
let (enabled, registrations) = get_webauthn_registrations(&headers.user.uuid, &conn).await?; let (enabled, registrations) = get_webauthn_registrations(&headers.user.uuid, &mut conn).await?;
let registrations_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect(); let registrations_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
@ -122,12 +122,12 @@ async fn get_webauthn(data: JsonUpcase<PasswordData>, headers: Headers, conn: Db
} }
#[post("/two-factor/get-webauthn-challenge", data = "<data>")] #[post("/two-factor/get-webauthn-challenge", data = "<data>")]
async fn generate_webauthn_challenge(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { async fn generate_webauthn_challenge(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
if !headers.user.check_valid_password(&data.data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.data.MasterPasswordHash) {
err!("Invalid password"); err!("Invalid password");
} }
let registrations = get_webauthn_registrations(&headers.user.uuid, &conn) let registrations = get_webauthn_registrations(&headers.user.uuid, &mut conn)
.await? .await?
.1 .1
.into_iter() .into_iter()
@ -144,7 +144,7 @@ async fn generate_webauthn_challenge(data: JsonUpcase<PasswordData>, headers: He
)?; )?;
let type_ = TwoFactorType::WebauthnRegisterChallenge; let type_ = TwoFactorType::WebauthnRegisterChallenge;
TwoFactor::new(headers.user.uuid, type_, serde_json::to_string(&state)?).save(&conn).await?; TwoFactor::new(headers.user.uuid, type_, serde_json::to_string(&state)?).save(&mut conn).await?;
let mut challenge_value = serde_json::to_value(challenge.public_key)?; let mut challenge_value = serde_json::to_value(challenge.public_key)?;
challenge_value["status"] = "ok".into(); challenge_value["status"] = "ok".into();
@ -241,7 +241,7 @@ impl From<PublicKeyCredentialCopy> for PublicKeyCredential {
} }
#[post("/two-factor/webauthn", data = "<data>")] #[post("/two-factor/webauthn", data = "<data>")]
async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult { async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EnableWebauthnData = data.into_inner().data; let data: EnableWebauthnData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -251,10 +251,10 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
// Retrieve and delete the saved challenge state // Retrieve and delete the saved challenge state
let type_ = TwoFactorType::WebauthnRegisterChallenge as i32; let type_ = TwoFactorType::WebauthnRegisterChallenge as i32;
let state = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await { let state = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await {
Some(tf) => { Some(tf) => {
let state: RegistrationState = serde_json::from_str(&tf.data)?; let state: RegistrationState = serde_json::from_str(&tf.data)?;
tf.delete(&conn).await?; tf.delete(&mut conn).await?;
state state
} }
None => err!("Can't recover challenge"), None => err!("Can't recover challenge"),
@ -264,7 +264,7 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
let (credential, _data) = let (credential, _data) =
WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?; WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?;
let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &conn).await?.1; let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &mut conn).await?.1;
// TODO: Check for repeated ID's // TODO: Check for repeated ID's
registrations.push(WebauthnRegistration { registrations.push(WebauthnRegistration {
id: data.Id.into_i32()?, id: data.Id.into_i32()?,
@ -276,9 +276,9 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
// Save the registrations and return them // Save the registrations and return them
TwoFactor::new(user.uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(&registrations)?) TwoFactor::new(user.uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(&registrations)?)
.save(&conn) .save(&mut conn)
.await?; .await?;
_generate_recover_code(&mut user, &conn).await; _generate_recover_code(&mut user, &mut conn).await;
let keys_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect(); let keys_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({ Ok(Json(json!({
@ -301,17 +301,17 @@ struct DeleteU2FData {
} }
#[delete("/two-factor/webauthn", data = "<data>")] #[delete("/two-factor/webauthn", data = "<data>")]
async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn: DbConn) -> JsonResult { async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let id = data.data.Id.into_i32()?; let id = data.data.Id.into_i32()?;
if !headers.user.check_valid_password(&data.data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.data.MasterPasswordHash) {
err!("Invalid password"); err!("Invalid password");
} }
let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &conn).await let mut tf =
{ match TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &mut conn).await {
Some(tf) => tf, Some(tf) => tf,
None => err!("Webauthn data not found!"), None => err!("Webauthn data not found!"),
}; };
let mut data: Vec<WebauthnRegistration> = serde_json::from_str(&tf.data)?; let mut data: Vec<WebauthnRegistration> = serde_json::from_str(&tf.data)?;
@ -322,11 +322,12 @@ async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn
let removed_item = data.remove(item_pos); let removed_item = data.remove(item_pos);
tf.data = serde_json::to_string(&data)?; tf.data = serde_json::to_string(&data)?;
tf.save(&conn).await?; tf.save(&mut conn).await?;
drop(tf); drop(tf);
// If entry is migrated from u2f, delete the u2f entry as well // If entry is migrated from u2f, delete the u2f entry as well
if let Some(mut u2f) = TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2f as i32, &conn).await if let Some(mut u2f) =
TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2f as i32, &mut conn).await
{ {
let mut data: Vec<U2FRegistration> = match serde_json::from_str(&u2f.data) { let mut data: Vec<U2FRegistration> = match serde_json::from_str(&u2f.data) {
Ok(d) => d, Ok(d) => d,
@ -337,7 +338,7 @@ async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn
let new_data_str = serde_json::to_string(&data)?; let new_data_str = serde_json::to_string(&data)?;
u2f.data = new_data_str; u2f.data = new_data_str;
u2f.save(&conn).await?; u2f.save(&mut conn).await?;
} }
let keys_json: Vec<Value> = data.iter().map(WebauthnRegistration::to_json).collect(); let keys_json: Vec<Value> = data.iter().map(WebauthnRegistration::to_json).collect();
@ -351,7 +352,7 @@ async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn
pub async fn get_webauthn_registrations( pub async fn get_webauthn_registrations(
user_uuid: &str, user_uuid: &str,
conn: &DbConn, conn: &mut DbConn,
) -> Result<(bool, Vec<WebauthnRegistration>), Error> { ) -> Result<(bool, Vec<WebauthnRegistration>), Error> {
let type_ = TwoFactorType::Webauthn as i32; let type_ = TwoFactorType::Webauthn as i32;
match TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await { match TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await {
@ -360,7 +361,7 @@ pub async fn get_webauthn_registrations(
} }
} }
pub async fn generate_webauthn_login(user_uuid: &str, conn: &DbConn) -> JsonResult { pub async fn generate_webauthn_login(user_uuid: &str, conn: &mut DbConn) -> JsonResult {
// Load saved credentials // Load saved credentials
let creds: Vec<Credential> = let creds: Vec<Credential> =
get_webauthn_registrations(user_uuid, conn).await?.1.into_iter().map(|r| r.credential).collect(); get_webauthn_registrations(user_uuid, conn).await?.1.into_iter().map(|r| r.credential).collect();
@ -382,7 +383,7 @@ pub async fn generate_webauthn_login(user_uuid: &str, conn: &DbConn) -> JsonResu
Ok(Json(serde_json::to_value(response.public_key)?)) Ok(Json(serde_json::to_value(response.public_key)?))
} }
pub async fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult { pub async fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &mut DbConn) -> EmptyResult {
let type_ = TwoFactorType::WebauthnLoginChallenge as i32; let type_ = TwoFactorType::WebauthnLoginChallenge as i32;
let state = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await { let state = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await {
Some(tf) => { Some(tf) => {


@ -78,7 +78,7 @@ fn verify_yubikey_otp(otp: String) -> EmptyResult {
} }
#[post("/two-factor/get-yubikey", data = "<data>")] #[post("/two-factor/get-yubikey", data = "<data>")]
async fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { async fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
// Make sure the credentials are set // Make sure the credentials are set
get_yubico_credentials()?; get_yubico_credentials()?;
@ -92,7 +92,7 @@ async fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn
let user_uuid = &user.uuid; let user_uuid = &user.uuid;
let yubikey_type = TwoFactorType::YubiKey as i32; let yubikey_type = TwoFactorType::YubiKey as i32;
let r = TwoFactor::find_by_user_and_type(user_uuid, yubikey_type, &conn).await; let r = TwoFactor::find_by_user_and_type(user_uuid, yubikey_type, &mut conn).await;
if let Some(r) = r { if let Some(r) = r {
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?; let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?;
@ -113,7 +113,7 @@ async fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn
} }
#[post("/two-factor/yubikey", data = "<data>")] #[post("/two-factor/yubikey", data = "<data>")]
async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult { async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: EnableYubikeyData = data.into_inner().data; let data: EnableYubikeyData = data.into_inner().data;
let mut user = headers.user; let mut user = headers.user;
@ -123,7 +123,7 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
// Check if we already have some data // Check if we already have some data
let mut yubikey_data = let mut yubikey_data =
match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::YubiKey as i32, &conn).await { match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::YubiKey as i32, &mut conn).await {
Some(data) => data, Some(data) => data,
None => TwoFactor::new(user.uuid.clone(), TwoFactorType::YubiKey, String::new()), None => TwoFactor::new(user.uuid.clone(), TwoFactorType::YubiKey, String::new()),
}; };
@ -155,9 +155,9 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
}; };
yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap(); yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap();
yubikey_data.save(&conn).await?; yubikey_data.save(&mut conn).await?;
_generate_recover_code(&mut user, &conn).await; _generate_recover_code(&mut user, &mut conn).await;
let mut result = jsonify_yubikeys(yubikey_metadata.Keys); let mut result = jsonify_yubikeys(yubikey_metadata.Keys);


@ -55,21 +55,21 @@ async fn login(data: Form<ConnectData>, conn: DbConn, ip: ClientIp) -> JsonResul
} }
} }
async fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { async fn _refresh_login(data: ConnectData, mut conn: DbConn) -> JsonResult {
// Extract token // Extract token
let token = data.refresh_token.unwrap(); let token = data.refresh_token.unwrap();
// Get device by refresh token // Get device by refresh token
let mut device = Device::find_by_refresh_token(&token, &conn).await.map_res("Invalid refresh token")?; let mut device = Device::find_by_refresh_token(&token, &mut conn).await.map_res("Invalid refresh token")?;
let scope = "api offline_access"; let scope = "api offline_access";
let scope_vec = vec!["api".into(), "offline_access".into()]; let scope_vec = vec!["api".into(), "offline_access".into()];
// Common // Common
let user = User::find_by_uuid(&device.user_uuid, &conn).await.unwrap(); let user = User::find_by_uuid(&device.user_uuid, &mut conn).await.unwrap();
let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn).await; let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &mut conn).await;
let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec);
device.save(&conn).await?; device.save(&mut conn).await?;
Ok(Json(json!({ Ok(Json(json!({
"access_token": access_token, "access_token": access_token,
@ -87,7 +87,7 @@ async fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
}))) })))
} }
async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { async fn _password_login(data: ConnectData, mut conn: DbConn, ip: &ClientIp) -> JsonResult {
// Validate scope // Validate scope
let scope = data.scope.as_ref().unwrap(); let scope = data.scope.as_ref().unwrap();
if scope != "api offline_access" { if scope != "api offline_access" {
@ -100,7 +100,7 @@ async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> Json
// Get the user // Get the user
let username = data.username.as_ref().unwrap().trim(); let username = data.username.as_ref().unwrap().trim();
let user = match User::find_by_mail(username, &conn).await { let user = match User::find_by_mail(username, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)), None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)),
}; };
@ -131,7 +131,7 @@ async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> Json
user.last_verifying_at = Some(now); user.last_verifying_at = Some(now);
user.login_verify_count += 1; user.login_verify_count += 1;
if let Err(e) = user.save(&conn).await { if let Err(e) = user.save(&mut conn).await {
error!("Error updating user: {:#?}", e); error!("Error updating user: {:#?}", e);
} }
@ -145,9 +145,9 @@ async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> Json
err!("Please verify your email before trying again.", format!("IP: {}. Username: {}.", ip.ip, username)) err!("Please verify your email before trying again.", format!("IP: {}. Username: {}.", ip.ip, username))
} }
let (mut device, new_device) = get_device(&data, &conn, &user).await; let (mut device, new_device) = get_device(&data, &mut conn, &user).await;
let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, ip, &conn).await?; let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, ip, &mut conn).await?;
if CONFIG.mail_enabled() && new_device { if CONFIG.mail_enabled() && new_device {
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await { if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await {
@ -160,9 +160,9 @@ async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> Json
} }
// Common // Common
let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn).await; let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &mut conn).await;
let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec);
device.save(&conn).await?; device.save(&mut conn).await?;
let mut result = json!({ let mut result = json!({
"access_token": access_token, "access_token": access_token,
@ -188,7 +188,7 @@ async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> Json
Ok(Json(result)) Ok(Json(result))
} }
async fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { async fn _api_key_login(data: ConnectData, mut conn: DbConn, ip: &ClientIp) -> JsonResult {
// Validate scope // Validate scope
let scope = data.scope.as_ref().unwrap(); let scope = data.scope.as_ref().unwrap();
if scope != "api" { if scope != "api" {
@ -205,7 +205,7 @@ async fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonR
Some(uuid) => uuid, Some(uuid) => uuid,
None => err!("Malformed client_id", format!("IP: {}.", ip.ip)), None => err!("Malformed client_id", format!("IP: {}.", ip.ip)),
}; };
let user = match User::find_by_uuid(user_uuid, &conn).await { let user = match User::find_by_uuid(user_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err!("Invalid client_id", format!("IP: {}.", ip.ip)), None => err!("Invalid client_id", format!("IP: {}.", ip.ip)),
}; };
@ -221,7 +221,7 @@ async fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonR
err!("Incorrect client_secret", format!("IP: {}. Username: {}.", ip.ip, user.email)) err!("Incorrect client_secret", format!("IP: {}. Username: {}.", ip.ip, user.email))
} }
let (mut device, new_device) = get_device(&data, &conn, &user).await; let (mut device, new_device) = get_device(&data, &mut conn, &user).await;
if CONFIG.mail_enabled() && new_device { if CONFIG.mail_enabled() && new_device {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
@ -235,9 +235,9 @@ async fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonR
} }
// Common // Common
let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn).await; let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &mut conn).await;
let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec);
device.save(&conn).await?; device.save(&mut conn).await?;
info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip); info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip);
@ -259,7 +259,7 @@ async fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonR
} }
/// Retrieves an existing device or creates a new device from ConnectData and the User /// Retrieves an existing device or creates a new device from ConnectData and the User
async fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) { async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> (Device, bool) {
// On iOS, device_type sends "iOS", on others it sends a number // On iOS, device_type sends "iOS", on others it sends a number
let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(0); let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(0);
let device_id = data.device_identifier.clone().expect("No device id provided"); let device_id = data.device_identifier.clone().expect("No device id provided");
@ -283,7 +283,7 @@ async fn twofactor_auth(
data: &ConnectData, data: &ConnectData,
device: &mut Device, device: &mut Device,
ip: &ClientIp, ip: &ClientIp,
conn: &DbConn, conn: &mut DbConn,
) -> ApiResult<Option<String>> { ) -> ApiResult<Option<String>> {
let twofactors = TwoFactor::find_by_user(user_uuid, conn).await; let twofactors = TwoFactor::find_by_user(user_uuid, conn).await;
@ -355,7 +355,7 @@ fn _selected_data(tf: Option<TwoFactor>) -> ApiResult<String> {
tf.map(|t| t.data).map_res("Two factor doesn't exist") tf.map(|t| t.data).map_res("Two factor doesn't exist")
} }
async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> ApiResult<Value> { async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbConn) -> ApiResult<Value> {
use crate::api::core::two_factor; use crate::api::core::two_factor;
let mut result = json!({ let mut result = json!({

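In `_password_login` and `_api_key_login` above, `&mut conn` is passed to several helpers in sequence (`get_device`, `twofactor_auth`, `find_confirmed_by_user`, `device.save`). That compiles with a single `mut conn` binding because each call takes a fresh mutable reborrow that ends when the call returns. A stand-in sketch of that flow:

    // Stand-in types only; none of this is Vaultwarden's real API.
    struct DbConn;
    struct Device;

    fn get_device(_conn: &mut DbConn) -> Device {
        Device
    }

    fn twofactor_auth(_device: &mut Device, _conn: &mut DbConn) -> Option<String> {
        None // pretend no second factor is configured
    }

    fn save_device(_device: &Device, _conn: &mut DbConn) {}

    fn password_login(mut conn: DbConn) {
        let mut device = get_device(&mut conn); // mutable borrow ends when the call returns
        let _token = twofactor_auth(&mut device, &mut conn); // a fresh reborrow, fine
        save_device(&device, &mut conn); // and another
    }

    fn main() {
        password_login(DbConn);
    }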

@ -348,17 +348,17 @@ impl<'r> FromRequest<'r> for Headers {
let device_uuid = claims.device; let device_uuid = claims.device;
let user_uuid = claims.sub; let user_uuid = claims.sub;
let conn = match DbConn::from_request(request).await { let mut conn = match DbConn::from_request(request).await {
Outcome::Success(conn) => conn, Outcome::Success(conn) => conn,
_ => err_handler!("Error getting DB"), _ => err_handler!("Error getting DB"),
}; };
let device = match Device::find_by_uuid_and_user(&device_uuid, &user_uuid, &conn).await { let device = match Device::find_by_uuid_and_user(&device_uuid, &user_uuid, &mut conn).await {
Some(device) => device, Some(device) => device,
None => err_handler!("Invalid device id"), None => err_handler!("Invalid device id"),
}; };
let user = match User::find_by_uuid(&user_uuid, &conn).await { let user = match User::find_by_uuid(&user_uuid, &mut conn).await {
Some(user) => user, Some(user) => user,
None => err_handler!("Device has no user associated"), None => err_handler!("Device has no user associated"),
}; };
@ -380,7 +380,7 @@ impl<'r> FromRequest<'r> for Headers {
// This prevents checking this stamp exception for new requests. // This prevents checking this stamp exception for new requests.
let mut user = user; let mut user = user;
user.reset_stamp_exception(); user.reset_stamp_exception();
if let Err(e) = user.save(&conn).await { if let Err(e) = user.save(&mut conn).await {
error!("Error updating user: {:#?}", e); error!("Error updating user: {:#?}", e);
} }
err_handler!("Stamp exception is expired") err_handler!("Stamp exception is expired")
@ -438,13 +438,13 @@ impl<'r> FromRequest<'r> for OrgHeaders {
let headers = try_outcome!(Headers::from_request(request).await); let headers = try_outcome!(Headers::from_request(request).await);
match get_org_id(request) { match get_org_id(request) {
Some(org_id) => { Some(org_id) => {
let conn = match DbConn::from_request(request).await { let mut conn = match DbConn::from_request(request).await {
Outcome::Success(conn) => conn, Outcome::Success(conn) => conn,
_ => err_handler!("Error getting DB"), _ => err_handler!("Error getting DB"),
}; };
let user = headers.user; let user = headers.user;
let org_user = match UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).await { let org_user = match UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &mut conn).await {
Some(user) => { Some(user) => {
if user.status == UserOrgStatus::Confirmed as i32 { if user.status == UserOrgStatus::Confirmed as i32 {
user user
@ -550,14 +550,18 @@ impl<'r> FromRequest<'r> for ManagerHeaders {
if headers.org_user_type >= UserOrgType::Manager { if headers.org_user_type >= UserOrgType::Manager {
match get_col_id(request) { match get_col_id(request) {
Some(col_id) => { Some(col_id) => {
let conn = match DbConn::from_request(request).await { let mut conn = match DbConn::from_request(request).await {
Outcome::Success(conn) => conn, Outcome::Success(conn) => conn,
_ => err_handler!("Error getting DB"), _ => err_handler!("Error getting DB"),
}; };
if !headers.org_user.has_full_access() { if !headers.org_user.has_full_access() {
match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) match CollectionUser::find_by_collection_and_user(
.await &col_id,
&headers.org_user.user_uuid,
&mut conn,
)
.await
{ {
Some(_) => (), Some(_) => (),
None => err_handler!("The current user isn't a manager for this collection"), None => err_handler!("The current user isn't a manager for this collection"),


@ -365,7 +365,7 @@ pub mod models;
/// Creates a back-up of the sqlite database /// Creates a back-up of the sqlite database
/// MySQL/MariaDB and PostgreSQL are not supported. /// MySQL/MariaDB and PostgreSQL are not supported.
pub async fn backup_database(conn: &DbConn) -> Result<(), Error> { pub async fn backup_database(conn: &mut DbConn) -> Result<(), Error> {
db_run! {@raw conn: db_run! {@raw conn:
postgresql, mysql { postgresql, mysql {
let _ = conn; let _ = conn;
@ -383,15 +383,19 @@ pub async fn backup_database(conn: &DbConn) -> Result<(), Error> {
} }
/// Get the SQL Server version /// Get the SQL Server version
pub async fn get_sql_server_version(conn: &DbConn) -> String { pub async fn get_sql_server_version(conn: &mut DbConn) -> String {
db_run! {@raw conn: db_run! {@raw conn:
postgresql, mysql { postgresql, mysql {
no_arg_sql_function!(version, diesel::sql_types::Text); sql_function!{
diesel::select(version).get_result::<String>(conn).unwrap_or_else(|_| "Unknown".to_string()) fn version() -> diesel::sql_types::Text;
}
diesel::select(version()).get_result::<String>(conn).unwrap_or_else(|_| "Unknown".to_string())
} }
sqlite { sqlite {
no_arg_sql_function!(sqlite_version, diesel::sql_types::Text); sql_function!{
diesel::select(sqlite_version).get_result::<String>(conn).unwrap_or_else(|_| "Unknown".to_string()) fn sqlite_version() -> diesel::sql_types::Text;
}
diesel::select(sqlite_version()).get_result::<String>(conn).unwrap_or_else(|_| "Unknown".to_string())
} }
} }
} }
@ -416,7 +420,8 @@ impl<'r> FromRequest<'r> for DbConn {
// https://docs.rs/diesel_migrations/*/diesel_migrations/macro.embed_migrations.html // https://docs.rs/diesel_migrations/*/diesel_migrations/macro.embed_migrations.html
#[cfg(sqlite)] #[cfg(sqlite)]
mod sqlite_migrations { mod sqlite_migrations {
embed_migrations!("migrations/sqlite"); use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite");
pub fn run_migrations() -> Result<(), super::Error> { pub fn run_migrations() -> Result<(), super::Error> {
// Make sure the directory exists // Make sure the directory exists
@ -432,52 +437,54 @@ mod sqlite_migrations {
use diesel::{Connection, RunQueryDsl}; use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; let mut connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// Scoped to a connection. // Scoped to a connection.
diesel::sql_query("PRAGMA foreign_keys = OFF") diesel::sql_query("PRAGMA foreign_keys = OFF")
.execute(&connection) .execute(&mut connection)
.expect("Failed to disable Foreign Key Checks during migrations"); .expect("Failed to disable Foreign Key Checks during migrations");
// Turn on WAL in SQLite // Turn on WAL in SQLite
if crate::CONFIG.enable_db_wal() { if crate::CONFIG.enable_db_wal() {
diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL"); diesel::sql_query("PRAGMA journal_mode=wal").execute(&mut connection).expect("Failed to turn on WAL");
} }
embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?; connection.run_pending_migrations(MIGRATIONS).expect("Error running migrations");
Ok(()) Ok(())
} }
} }
#[cfg(mysql)] #[cfg(mysql)]
mod mysql_migrations { mod mysql_migrations {
embed_migrations!("migrations/mysql"); use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/mysql");
pub fn run_migrations() -> Result<(), super::Error> { pub fn run_migrations() -> Result<(), super::Error> {
use diesel::{Connection, RunQueryDsl}; use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; let mut connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// Scoped to a connection/session. // Scoped to a connection/session.
diesel::sql_query("SET FOREIGN_KEY_CHECKS = 0") diesel::sql_query("SET FOREIGN_KEY_CHECKS = 0")
.execute(&connection) .execute(&mut connection)
.expect("Failed to disable Foreign Key Checks during migrations"); .expect("Failed to disable Foreign Key Checks during migrations");
embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?; connection.run_pending_migrations(MIGRATIONS).expect("Error running migrations");
Ok(()) Ok(())
} }
} }
#[cfg(postgresql)] #[cfg(postgresql)]
mod postgresql_migrations { mod postgresql_migrations {
embed_migrations!("migrations/postgresql"); use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/postgresql");
pub fn run_migrations() -> Result<(), super::Error> { pub fn run_migrations() -> Result<(), super::Error> {
use diesel::{Connection, RunQueryDsl}; use diesel::{Connection, RunQueryDsl};
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; let mut connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html,
@ -487,10 +494,10 @@ mod postgresql_migrations {
// Migrations that need to disable foreign key checks should run this // Migrations that need to disable foreign key checks should run this
// from within the migration script itself. // from within the migration script itself.
diesel::sql_query("SET CONSTRAINTS ALL DEFERRED") diesel::sql_query("SET CONSTRAINTS ALL DEFERRED")
.execute(&connection) .execute(&mut connection)
.expect("Failed to disable Foreign Key Checks during migrations"); .expect("Failed to disable Foreign Key Checks during migrations");
embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?; connection.run_pending_migrations(MIGRATIONS).expect("Error running migrations");
Ok(()) Ok(())
} }
} }

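Two diesel 2 API changes meet in the database module above: `no_arg_sql_function!` is replaced by `sql_function!`, and `embed_migrations!` now produces an `EmbeddedMigrations` constant applied through the `MigrationHarness` trait instead of the old `embedded_migrations::run_with_output`. A condensed sketch of both, assuming a `migrations/sqlite` directory exists at build time and the `sqlite` feature is enabled:

    use diesel::prelude::*;
    use diesel::sqlite::SqliteConnection;
    use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

    // Migrations are embedded into a const at compile time.
    pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite");

    // Declares SQLite's zero-argument built-in so it can be used in the query DSL.
    diesel::sql_function! {
        fn sqlite_version() -> diesel::sql_types::Text;
    }

    fn main() {
        let mut connection =
            SqliteConnection::establish(":memory:").expect("failed to open in-memory SQLite");

        // The harness method replaces `embedded_migrations::run_with_output(&connection, ...)`
        // and, like the rest of diesel 2, needs the connection mutably.
        connection.run_pending_migrations(MIGRATIONS).expect("error running migrations");

        let version: String = diesel::select(sqlite_version())
            .get_result(&mut connection)
            .unwrap_or_else(|_| "Unknown".to_string());
        println!("SQLite version: {version}");
    }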

@ -6,9 +6,9 @@ use crate::CONFIG;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "attachments"] #[diesel(table_name = attachments)]
#[changeset_options(treat_none_as_null="true")] #[diesel(treat_none_as_null = true)]
#[primary_key(id)] #[diesel(primary_key(id))]
pub struct Attachment { pub struct Attachment {
pub id: String, pub id: String,
pub cipher_uuid: String, pub cipher_uuid: String,
@ -58,7 +58,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Attachment { impl Attachment {
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: db_run! { conn:
sqlite, mysql { sqlite, mysql {
match diesel::replace_into(attachments::table) match diesel::replace_into(attachments::table)
@ -90,7 +90,7 @@ impl Attachment {
} }
} }
pub async fn delete(&self, conn: &DbConn) -> EmptyResult { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
crate::util::retry( crate::util::retry(
|| diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(conn), || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(conn),
@ -114,14 +114,14 @@ impl Attachment {
}} }}
} }
pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for attachment in Attachment::find_by_cipher(cipher_uuid, conn).await { for attachment in Attachment::find_by_cipher(cipher_uuid, conn).await {
attachment.delete(conn).await?; attachment.delete(conn).await?;
} }
Ok(()) Ok(())
} }
pub async fn find_by_id(id: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_id(id: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
attachments::table attachments::table
.filter(attachments::id.eq(id.to_lowercase())) .filter(attachments::id.eq(id.to_lowercase()))
@ -131,7 +131,7 @@ impl Attachment {
}} }}
} }
pub async fn find_by_cipher(cipher_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_cipher(cipher_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
attachments::table attachments::table
.filter(attachments::cipher_uuid.eq(cipher_uuid)) .filter(attachments::cipher_uuid.eq(cipher_uuid))
@ -141,7 +141,7 @@ impl Attachment {
}} }}
} }
pub async fn size_by_user(user_uuid: &str, conn: &DbConn) -> i64 { pub async fn size_by_user(user_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
let result: Option<i64> = attachments::table let result: Option<i64> = attachments::table
.left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
@ -153,7 +153,7 @@ impl Attachment {
}} }}
} }
pub async fn count_by_user(user_uuid: &str, conn: &DbConn) -> i64 { pub async fn count_by_user(user_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
attachments::table attachments::table
.left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
@ -164,7 +164,7 @@ impl Attachment {
}} }}
} }
pub async fn size_by_org(org_uuid: &str, conn: &DbConn) -> i64 { pub async fn size_by_org(org_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
let result: Option<i64> = attachments::table let result: Option<i64> = attachments::table
.left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
@ -176,7 +176,7 @@ impl Attachment {
}} }}
} }
pub async fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { pub async fn count_by_org(org_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
attachments::table attachments::table
.left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
@ -187,7 +187,7 @@ impl Attachment {
}} }}
} }
pub async fn find_all_by_ciphers(cipher_uuids: &Vec<String>, conn: &DbConn) -> Vec<Self> { pub async fn find_all_by_ciphers(cipher_uuids: &Vec<String>, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
attachments::table attachments::table
.filter(attachments::cipher_uuid.eq_any(cipher_uuids)) .filter(attachments::cipher_uuid.eq_any(cipher_uuids))

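The model struct above (and `Cipher` below) switches from diesel 1's separate `#[table_name]`, `#[changeset_options]` and `#[primary_key]` attributes to diesel 2's unified `#[diesel(...)]` attribute, where `table_name` takes a path rather than a string. A trimmed stand-in with only the two columns visible in the hunk, not the full Attachment struct, assuming diesel 2:

    use diesel::prelude::*;

    diesel::table! {
        attachments (id) {
            id -> Text,
            cipher_uuid -> Text,
        }
    }

    #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
    #[diesel(table_name = attachments)]  // was: #[table_name = "attachments"]
    #[diesel(treat_none_as_null = true)] // was: #[changeset_options(treat_none_as_null="true")]
    #[diesel(primary_key(id))]           // was: #[primary_key(id)]
    pub struct Attachment {
        pub id: String,
        pub cipher_uuid: String,
    }

    fn main() {
        // Nothing to run; this only needs to compile to show the attribute syntax.
        let _ = Attachment {
            id: "attachment-id".into(),
            cipher_uuid: "cipher-uuid".into(),
        };
    }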

@ -12,9 +12,9 @@ use std::borrow::Cow;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "ciphers"] #[diesel(table_name = ciphers)]
#[changeset_options(treat_none_as_null="true")] #[diesel(treat_none_as_null = true)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct Cipher { pub struct Cipher {
pub uuid: String, pub uuid: String,
pub created_at: NaiveDateTime, pub created_at: NaiveDateTime,
@ -87,7 +87,7 @@ impl Cipher {
host: &str, host: &str,
user_uuid: &str, user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>, cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn, conn: &mut DbConn,
) -> Value { ) -> Value {
use crate::util::format_date; use crate::util::format_date;
@ -148,7 +148,7 @@ impl Cipher {
Cow::from(Vec::with_capacity(0)) Cow::from(Vec::with_capacity(0))
} }
} else { } else {
Cow::from(self.get_collections(user_uuid, conn).await) Cow::from(self.get_collections(user_uuid.to_string(), conn).await)
}; };
// There are three types of cipher response models in upstream // There are three types of cipher response models in upstream
@ -210,7 +210,7 @@ impl Cipher {
json_object json_object
} }
pub async fn update_users_revision(&self, conn: &DbConn) -> Vec<String> { pub async fn update_users_revision(&self, conn: &mut DbConn) -> Vec<String> {
let mut user_uuids = Vec::new(); let mut user_uuids = Vec::new();
match self.user_uuid { match self.user_uuid {
Some(ref user_uuid) => { Some(ref user_uuid) => {
@ -230,7 +230,7 @@ impl Cipher {
user_uuids user_uuids
} }
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
@ -265,7 +265,7 @@ impl Cipher {
} }
} }
pub async fn delete(&self, conn: &DbConn) -> EmptyResult { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
FolderCipher::delete_all_by_cipher(&self.uuid, conn).await?; FolderCipher::delete_all_by_cipher(&self.uuid, conn).await?;
@ -280,7 +280,7 @@ impl Cipher {
}} }}
} }
pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_organization(org_uuid: &str, conn: &mut DbConn) -> EmptyResult {
// TODO: Optimize this by executing a DELETE directly on the database, instead of first fetching. // TODO: Optimize this by executing a DELETE directly on the database, instead of first fetching.
for cipher in Self::find_by_org(org_uuid, conn).await { for cipher in Self::find_by_org(org_uuid, conn).await {
cipher.delete(conn).await?; cipher.delete(conn).await?;
@ -288,7 +288,7 @@ impl Cipher {
Ok(()) Ok(())
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for cipher in Self::find_owned_by_user(user_uuid, conn).await { for cipher in Self::find_owned_by_user(user_uuid, conn).await {
cipher.delete(conn).await?; cipher.delete(conn).await?;
} }
@ -296,7 +296,7 @@ impl Cipher {
} }
/// Purge all ciphers that are old enough to be auto-deleted. /// Purge all ciphers that are old enough to be auto-deleted.
pub async fn purge_trash(conn: &DbConn) { pub async fn purge_trash(conn: &mut DbConn) {
if let Some(auto_delete_days) = CONFIG.trash_auto_delete_days() { if let Some(auto_delete_days) = CONFIG.trash_auto_delete_days() {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
let dt = now - Duration::days(auto_delete_days); let dt = now - Duration::days(auto_delete_days);
@ -306,7 +306,7 @@ impl Cipher {
} }
} }
pub async fn move_to_folder(&self, folder_uuid: Option<String>, user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn move_to_folder(&self, folder_uuid: Option<String>, user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(user_uuid, conn).await; User::update_uuid_revision(user_uuid, conn).await;
match (self.get_folder_uuid(user_uuid, conn).await, folder_uuid) { match (self.get_folder_uuid(user_uuid, conn).await, folder_uuid) {
@ -343,7 +343,7 @@ impl Cipher {
&self, &self,
user_uuid: &str, user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>, cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn, conn: &mut DbConn,
) -> bool { ) -> bool {
if let Some(ref org_uuid) = self.organization_uuid { if let Some(ref org_uuid) = self.organization_uuid {
if let Some(cipher_sync_data) = cipher_sync_data { if let Some(cipher_sync_data) = cipher_sync_data {
@ -362,7 +362,7 @@ impl Cipher {
&self, &self,
user_uuid: &str, user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>, cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn, conn: &mut DbConn,
) -> bool { ) -> bool {
if let Some(ref org_uuid) = self.organization_uuid { if let Some(ref org_uuid) = self.organization_uuid {
if let Some(cipher_sync_data) = cipher_sync_data { if let Some(cipher_sync_data) = cipher_sync_data {
@ -383,7 +383,7 @@ impl Cipher {
&self, &self,
user_uuid: &str, user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>, cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn, conn: &mut DbConn,
) -> Option<(bool, bool)> { ) -> Option<(bool, bool)> {
// Check whether this cipher is directly owned by the user, or is in // Check whether this cipher is directly owned by the user, or is in
// a collection that the user has full access to. If so, there are no // a collection that the user has full access to. If so, there are no
@ -441,7 +441,7 @@ impl Cipher {
Some((read_only, hide_passwords)) Some((read_only, hide_passwords))
} }
async fn get_user_collections_access_flags(&self, user_uuid: &str, conn: &DbConn) -> Vec<(bool, bool)> { async fn get_user_collections_access_flags(&self, user_uuid: &str, conn: &mut DbConn) -> Vec<(bool, bool)> {
db_run! {conn: { db_run! {conn: {
// Check whether this cipher is in any collections accessible to the // Check whether this cipher is in any collections accessible to the
// user. If so, retrieve the access flags for each collection. // user. If so, retrieve the access flags for each collection.
@ -458,7 +458,7 @@ impl Cipher {
}} }}
} }
async fn get_group_collections_access_flags(&self, user_uuid: &str, conn: &DbConn) -> Vec<(bool, bool)> { async fn get_group_collections_access_flags(&self, user_uuid: &str, conn: &mut DbConn) -> Vec<(bool, bool)> {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter(ciphers::uuid.eq(&self.uuid)) .filter(ciphers::uuid.eq(&self.uuid))
@ -481,31 +481,31 @@ impl Cipher {
}} }}
} }
pub async fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &mut DbConn) -> bool {
match self.get_access_restrictions(user_uuid, None, conn).await { match self.get_access_restrictions(user_uuid, None, conn).await {
Some((read_only, _hide_passwords)) => !read_only, Some((read_only, _hide_passwords)) => !read_only,
None => false, None => false,
} }
} }
pub async fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_accessible_to_user(&self, user_uuid: &str, conn: &mut DbConn) -> bool {
self.get_access_restrictions(user_uuid, None, conn).await.is_some() self.get_access_restrictions(user_uuid, None, conn).await.is_some()
} }
// Returns whether this cipher is a favorite of the specified user. // Returns whether this cipher is a favorite of the specified user.
pub async fn is_favorite(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_favorite(&self, user_uuid: &str, conn: &mut DbConn) -> bool {
Favorite::is_favorite(&self.uuid, user_uuid, conn).await Favorite::is_favorite(&self.uuid, user_uuid, conn).await
} }
// Sets whether this cipher is a favorite of the specified user. // Sets whether this cipher is a favorite of the specified user.
pub async fn set_favorite(&self, favorite: Option<bool>, user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn set_favorite(&self, favorite: Option<bool>, user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
match favorite { match favorite {
None => Ok(()), // No change requested. None => Ok(()), // No change requested.
Some(status) => Favorite::set_favorite(status, &self.uuid, user_uuid, conn).await, Some(status) => Favorite::set_favorite(status, &self.uuid, user_uuid, conn).await,
} }
} }
pub async fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option<String> { pub async fn get_folder_uuid(&self, user_uuid: &str, conn: &mut DbConn) -> Option<String> {
db_run! {conn: { db_run! {conn: {
folders_ciphers::table folders_ciphers::table
.inner_join(folders::table) .inner_join(folders::table)
@ -517,7 +517,7 @@ impl Cipher {
}} }}
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter(ciphers::uuid.eq(uuid)) .filter(ciphers::uuid.eq(uuid))
@ -539,7 +539,7 @@ impl Cipher {
// true, then the non-interesting ciphers will not be returned. As a // true, then the non-interesting ciphers will not be returned. As a
// result, those ciphers will not appear in "My Vault" for the org // result, those ciphers will not appear in "My Vault" for the org
// owner/admin, but they can still be accessed via the org vault view. // owner/admin, but they can still be accessed via the org vault view.
pub async fn find_by_user(user_uuid: &str, visible_only: bool, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, visible_only: bool, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
let mut query = ciphers::table let mut query = ciphers::table
.left_join(ciphers_collections::table.on( .left_join(ciphers_collections::table.on(
@ -587,12 +587,12 @@ impl Cipher {
} }
// Find all ciphers visible to the specified user. // Find all ciphers visible to the specified user.
pub async fn find_by_user_visible(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user_visible(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
Self::find_by_user(user_uuid, true, conn).await Self::find_by_user(user_uuid, true, conn).await
} }
// Find all ciphers directly owned by the specified user. // Find all ciphers directly owned by the specified user.
pub async fn find_owned_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_owned_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter( .filter(
@ -603,7 +603,7 @@ impl Cipher {
}} }}
} }
pub async fn count_owned_by_user(user_uuid: &str, conn: &DbConn) -> i64 { pub async fn count_owned_by_user(user_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter(ciphers::user_uuid.eq(user_uuid)) .filter(ciphers::user_uuid.eq(user_uuid))
@ -614,7 +614,7 @@ impl Cipher {
}} }}
} }
pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_org(org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter(ciphers::organization_uuid.eq(org_uuid)) .filter(ciphers::organization_uuid.eq(org_uuid))
@ -622,7 +622,7 @@ impl Cipher {
}} }}
} }
pub async fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { pub async fn count_by_org(org_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter(ciphers::organization_uuid.eq(org_uuid)) .filter(ciphers::organization_uuid.eq(org_uuid))
@ -633,7 +633,7 @@ impl Cipher {
}} }}
} }
pub async fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_folder(folder_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
folders_ciphers::table.inner_join(ciphers::table) folders_ciphers::table.inner_join(ciphers::table)
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .filter(folders_ciphers::folder_uuid.eq(folder_uuid))
@ -643,7 +643,7 @@ impl Cipher {
} }
/// Find all ciphers that were deleted before the specified datetime. /// Find all ciphers that were deleted before the specified datetime.
pub async fn find_deleted_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec<Self> { pub async fn find_deleted_before(dt: &NaiveDateTime, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
ciphers::table ciphers::table
.filter(ciphers::deleted_at.lt(dt)) .filter(ciphers::deleted_at.lt(dt))
@ -651,7 +651,7 @@ impl Cipher {
}} }}
} }
pub async fn get_collections(&self, user_id: &str, conn: &DbConn) -> Vec<String> { pub async fn get_collections(&self, user_id: String, conn: &mut DbConn) -> Vec<String> {
db_run! {conn: { db_run! {conn: {
ciphers_collections::table ciphers_collections::table
.inner_join(collections::table.on( .inner_join(collections::table.on(
@ -659,12 +659,12 @@ impl Cipher {
)) ))
.inner_join(users_organizations::table.on( .inner_join(users_organizations::table.on(
users_organizations::org_uuid.eq(collections::org_uuid).and( users_organizations::org_uuid.eq(collections::org_uuid).and(
users_organizations::user_uuid.eq(user_id) users_organizations::user_uuid.eq(user_id.clone())
) )
)) ))
.left_join(users_collections::table.on( .left_join(users_collections::table.on(
users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and( users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and(
users_collections::user_uuid.eq(user_id) users_collections::user_uuid.eq(user_id.clone())
) )
)) ))
.filter(ciphers_collections::cipher_uuid.eq(&self.uuid)) .filter(ciphers_collections::cipher_uuid.eq(&self.uuid))
@ -680,7 +680,7 @@ impl Cipher {
/// Return a Vec with (cipher_uuid, collection_uuid) /// Return a Vec with (cipher_uuid, collection_uuid)
/// This is used during a full sync so we only need one query for all collections accessible. /// This is used during a full sync so we only need one query for all collections accessible.
pub async fn get_collections_with_cipher_by_user(user_id: &str, conn: &DbConn) -> Vec<(String, String)> { pub async fn get_collections_with_cipher_by_user(user_id: String, conn: &mut DbConn) -> Vec<(String, String)> {
db_run! {conn: { db_run! {conn: {
ciphers_collections::table ciphers_collections::table
.inner_join(collections::table.on( .inner_join(collections::table.on(
@ -688,12 +688,12 @@ impl Cipher {
)) ))
.inner_join(users_organizations::table.on( .inner_join(users_organizations::table.on(
users_organizations::org_uuid.eq(collections::org_uuid).and( users_organizations::org_uuid.eq(collections::org_uuid).and(
users_organizations::user_uuid.eq(user_id) users_organizations::user_uuid.eq(user_id.clone())
) )
)) ))
.left_join(users_collections::table.on( .left_join(users_collections::table.on(
users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and( users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and(
users_collections::user_uuid.eq(user_id) users_collections::user_uuid.eq(user_id.clone())
) )
)) ))
.left_join(groups_users::table.on( .left_join(groups_users::table.on(

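The derive attributes at the top of this file (and of each model file below) move from diesel 1.4's bare helper attributes to diesel 2's single #[diesel(...)] namespace: #[table_name = "ciphers"] becomes #[diesel(table_name = ciphers)] (a path rather than a string), #[changeset_options(treat_none_as_null="true")] becomes #[diesel(treat_none_as_null = true)], and #[primary_key(uuid)] becomes #[diesel(primary_key(uuid))]. A condensed before/after sketch with a trimmed, hypothetical field list:

    use diesel::prelude::*;

    diesel::table! {
        ciphers (uuid) {
            uuid -> Text,
            name -> Nullable<Text>,
        }
    }

    // diesel 1.4 spelling, removed by this commit:
    //   #[table_name = "ciphers"]
    //   #[changeset_options(treat_none_as_null = "true")]
    //   #[primary_key(uuid)]

    // diesel 2 spelling, introduced by this commit:
    #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
    #[diesel(table_name = ciphers)]
    #[diesel(treat_none_as_null = true)]
    #[diesel(primary_key(uuid))]
    pub struct Cipher {
        pub uuid: String,
        pub name: Option<String>,
    }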

@ -4,8 +4,8 @@ use super::{CollectionGroup, User, UserOrgStatus, UserOrgType, UserOrganization}
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "collections"] #[diesel(table_name = collections)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct Collection { pub struct Collection {
pub uuid: String, pub uuid: String,
pub org_uuid: String, pub org_uuid: String,
@ -13,8 +13,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "users_collections"] #[diesel(table_name = users_collections)]
#[primary_key(user_uuid, collection_uuid)] #[diesel(primary_key(user_uuid, collection_uuid))]
pub struct CollectionUser { pub struct CollectionUser {
pub user_uuid: String, pub user_uuid: String,
pub collection_uuid: String, pub collection_uuid: String,
@ -23,8 +23,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "ciphers_collections"] #[diesel(table_name = ciphers_collections)]
#[primary_key(cipher_uuid, collection_uuid)] #[diesel(primary_key(cipher_uuid, collection_uuid))]
pub struct CollectionCipher { pub struct CollectionCipher {
pub cipher_uuid: String, pub cipher_uuid: String,
pub collection_uuid: String, pub collection_uuid: String,
@ -56,7 +56,7 @@ impl Collection {
&self, &self,
user_uuid: &str, user_uuid: &str,
cipher_sync_data: Option<&crate::api::core::CipherSyncData>, cipher_sync_data: Option<&crate::api::core::CipherSyncData>,
conn: &DbConn, conn: &mut DbConn,
) -> Value { ) -> Value {
let (read_only, hide_passwords) = if let Some(cipher_sync_data) = cipher_sync_data { let (read_only, hide_passwords) = if let Some(cipher_sync_data) = cipher_sync_data {
match cipher_sync_data.user_organizations.get(&self.org_uuid) { match cipher_sync_data.user_organizations.get(&self.org_uuid) {
@ -89,7 +89,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Collection { impl Collection {
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
db_run! { conn: db_run! { conn:
@ -123,7 +123,7 @@ impl Collection {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
CollectionCipher::delete_all_by_collection(&self.uuid, conn).await?; CollectionCipher::delete_all_by_collection(&self.uuid, conn).await?;
CollectionUser::delete_all_by_collection(&self.uuid, conn).await?; CollectionUser::delete_all_by_collection(&self.uuid, conn).await?;
@ -136,20 +136,20 @@ impl Collection {
}} }}
} }
pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_organization(org_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for collection in Self::find_by_organization(org_uuid, conn).await { for collection in Self::find_by_organization(org_uuid, conn).await {
collection.delete(conn).await?; collection.delete(conn).await?;
} }
Ok(()) Ok(())
} }
pub async fn update_users_revision(&self, conn: &DbConn) { pub async fn update_users_revision(&self, conn: &mut DbConn) {
for user_org in UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).await.iter() { for user_org in UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).await.iter() {
User::update_uuid_revision(&user_org.user_uuid, conn).await; User::update_uuid_revision(&user_org.user_uuid, conn).await;
} }
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
collections::table collections::table
.filter(collections::uuid.eq(uuid)) .filter(collections::uuid.eq(uuid))
@ -159,17 +159,17 @@ impl Collection {
}} }}
} }
pub async fn find_by_user_uuid(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user_uuid(user_uuid: String, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
collections::table collections::table
.left_join(users_collections::table.on( .left_join(users_collections::table.on(
users_collections::collection_uuid.eq(collections::uuid).and( users_collections::collection_uuid.eq(collections::uuid).and(
users_collections::user_uuid.eq(user_uuid) users_collections::user_uuid.eq(user_uuid.clone())
) )
)) ))
.left_join(users_organizations::table.on( .left_join(users_organizations::table.on(
collections::org_uuid.eq(users_organizations::org_uuid).and( collections::org_uuid.eq(users_organizations::org_uuid).and(
users_organizations::user_uuid.eq(user_uuid) users_organizations::user_uuid.eq(user_uuid.clone())
) )
)) ))
.left_join(groups_users::table.on( .left_join(groups_users::table.on(
@ -203,11 +203,15 @@ impl Collection {
}} }}
} }
pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
Self::find_by_user_uuid(user_uuid, conn).await.into_iter().filter(|c| c.org_uuid == org_uuid).collect() Self::find_by_user_uuid(user_uuid.to_owned(), conn)
.await
.into_iter()
.filter(|c| c.org_uuid == org_uuid)
.collect()
} }
pub async fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_organization(org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
collections::table collections::table
.filter(collections::org_uuid.eq(org_uuid)) .filter(collections::org_uuid.eq(org_uuid))
@ -217,7 +221,7 @@ impl Collection {
}} }}
} }
pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
collections::table collections::table
.filter(collections::uuid.eq(uuid)) .filter(collections::uuid.eq(uuid))
@ -229,12 +233,12 @@ impl Collection {
}} }}
} }
pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: String, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
collections::table collections::table
.left_join(users_collections::table.on( .left_join(users_collections::table.on(
users_collections::collection_uuid.eq(collections::uuid).and( users_collections::collection_uuid.eq(collections::uuid).and(
users_collections::user_uuid.eq(user_uuid) users_collections::user_uuid.eq(user_uuid.clone())
) )
)) ))
.left_join(users_organizations::table.on( .left_join(users_organizations::table.on(
@ -255,7 +259,7 @@ impl Collection {
}} }}
} }
pub async fn is_writable_by_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_writable_by_user(&self, user_uuid: &str, conn: &mut DbConn) -> bool {
match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn).await { match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn).await {
None => false, // Not in Org None => false, // Not in Org
Some(user_org) => { Some(user_org) => {
@ -277,7 +281,7 @@ impl Collection {
} }
} }
pub async fn hide_passwords_for_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn hide_passwords_for_user(&self, user_uuid: &str, conn: &mut DbConn) -> bool {
match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn).await { match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn).await {
None => true, // Not in Org None => true, // Not in Org
Some(user_org) => { Some(user_org) => {
@ -302,7 +306,7 @@ impl Collection {
/// Database methods /// Database methods
impl CollectionUser { impl CollectionUser {
pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_collections::table users_collections::table
.filter(users_collections::user_uuid.eq(user_uuid)) .filter(users_collections::user_uuid.eq(user_uuid))
@ -320,7 +324,7 @@ impl CollectionUser {
collection_uuid: &str, collection_uuid: &str,
read_only: bool, read_only: bool,
hide_passwords: bool, hide_passwords: bool,
conn: &DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
User::update_uuid_revision(user_uuid, conn).await; User::update_uuid_revision(user_uuid, conn).await;
@ -373,7 +377,7 @@ impl CollectionUser {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn).await; User::update_uuid_revision(&self.user_uuid, conn).await;
db_run! { conn: { db_run! { conn: {
@ -387,7 +391,7 @@ impl CollectionUser {
}} }}
} }
pub async fn find_by_collection(collection_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_collection(collection_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_collections::table users_collections::table
.filter(users_collections::collection_uuid.eq(collection_uuid)) .filter(users_collections::collection_uuid.eq(collection_uuid))
@ -398,7 +402,11 @@ impl CollectionUser {
}} }}
} }
pub async fn find_by_collection_and_user(collection_uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_collection_and_user(
collection_uuid: &str,
user_uuid: &str,
conn: &mut DbConn,
) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
users_collections::table users_collections::table
.filter(users_collections::collection_uuid.eq(collection_uuid)) .filter(users_collections::collection_uuid.eq(collection_uuid))
@ -410,7 +418,7 @@ impl CollectionUser {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_collections::table users_collections::table
.filter(users_collections::user_uuid.eq(user_uuid)) .filter(users_collections::user_uuid.eq(user_uuid))
@ -421,7 +429,7 @@ impl CollectionUser {
}} }}
} }
pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_collection(collection_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for collection in CollectionUser::find_by_collection(collection_uuid, conn).await.iter() { for collection in CollectionUser::find_by_collection(collection_uuid, conn).await.iter() {
User::update_uuid_revision(&collection.user_uuid, conn).await; User::update_uuid_revision(&collection.user_uuid, conn).await;
} }
@ -433,7 +441,7 @@ impl CollectionUser {
}} }}
} }
pub async fn delete_all_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &mut DbConn) -> EmptyResult {
let collectionusers = Self::find_by_organization_and_user_uuid(org_uuid, user_uuid, conn).await; let collectionusers = Self::find_by_organization_and_user_uuid(org_uuid, user_uuid, conn).await;
db_run! { conn: { db_run! { conn: {
@ -452,7 +460,7 @@ impl CollectionUser {
/// Database methods /// Database methods
impl CollectionCipher { impl CollectionCipher {
pub async fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn save(cipher_uuid: &str, collection_uuid: &str, conn: &mut DbConn) -> EmptyResult {
Self::update_users_revision(collection_uuid, conn).await; Self::update_users_revision(collection_uuid, conn).await;
db_run! { conn: db_run! { conn:
@ -482,7 +490,7 @@ impl CollectionCipher {
} }
} }
pub async fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &mut DbConn) -> EmptyResult {
Self::update_users_revision(collection_uuid, conn).await; Self::update_users_revision(collection_uuid, conn).await;
db_run! { conn: { db_run! { conn: {
@ -496,7 +504,7 @@ impl CollectionCipher {
}} }}
} }
pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(ciphers_collections::table.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid))) diesel::delete(ciphers_collections::table.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)))
.execute(conn) .execute(conn)
@ -504,7 +512,7 @@ impl CollectionCipher {
}} }}
} }
pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_collection(collection_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(ciphers_collections::table.filter(ciphers_collections::collection_uuid.eq(collection_uuid))) diesel::delete(ciphers_collections::table.filter(ciphers_collections::collection_uuid.eq(collection_uuid)))
.execute(conn) .execute(conn)
@ -512,7 +520,7 @@ impl CollectionCipher {
}} }}
} }
pub async fn update_users_revision(collection_uuid: &str, conn: &DbConn) { pub async fn update_users_revision(collection_uuid: &str, conn: &mut DbConn) {
if let Some(collection) = Collection::find_by_uuid(collection_uuid, conn).await { if let Some(collection) = Collection::find_by_uuid(collection_uuid, conn).await {
collection.update_users_revision(conn).await; collection.update_users_revision(conn).await;
} }

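A second recurring adjustment in this file and in the cipher model above: helpers such as find_by_user_uuid and get_collections now take an owned String instead of &str, and the value is cloned into each .eq(...) bind (users_collections::user_uuid.eq(user_uuid.clone()), and so on). This reads as a way to satisfy diesel 2's tighter lifetime requirements when the query is assembled inside the db_run! macro: binding owned values keeps the built query from borrowing the caller's data. A rough sketch of the pattern under that assumption, with a hypothetical two-table schema:

    use diesel::prelude::*;

    diesel::table! {
        collections (uuid) {
            uuid -> Text,
            org_uuid -> Text,
        }
    }

    diesel::table! {
        users_collections (user_uuid, collection_uuid) {
            user_uuid -> Text,
            collection_uuid -> Text,
        }
    }

    diesel::joinable!(users_collections -> collections (collection_uuid));
    diesel::allow_tables_to_appear_in_same_query!(collections, users_collections);

    // Taking user_uuid by value and cloning it per bind keeps the query free of
    // borrows into the caller, mirroring find_by_user_uuid above. With a single
    // bind the clone is redundant; the real queries bind the value several times.
    fn collection_uuids_for_user(conn: &mut SqliteConnection, user_uuid: String) -> QueryResult<Vec<String>> {
        collections::table
            .inner_join(users_collections::table)
            .filter(users_collections::user_uuid.eq(user_uuid.clone()))
            .select(collections::uuid)
            .load(conn)
    }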

@ -4,9 +4,9 @@ use crate::CONFIG;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "devices"] #[diesel(table_name = devices)]
#[changeset_options(treat_none_as_null="true")] #[diesel(treat_none_as_null = true)]
#[primary_key(uuid, user_uuid)] #[diesel(primary_key(uuid, user_uuid))]
pub struct Device { pub struct Device {
pub uuid: String, pub uuid: String,
pub created_at: NaiveDateTime, pub created_at: NaiveDateTime,
@ -116,7 +116,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Device { impl Device {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
db_run! { conn: db_run! { conn:
@ -136,7 +136,7 @@ impl Device {
} }
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(devices::table.filter(devices::user_uuid.eq(user_uuid))) diesel::delete(devices::table.filter(devices::user_uuid.eq(user_uuid)))
.execute(conn) .execute(conn)
@ -144,7 +144,7 @@ impl Device {
}} }}
} }
pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
devices::table devices::table
.filter(devices::uuid.eq(uuid)) .filter(devices::uuid.eq(uuid))
@ -155,7 +155,7 @@ impl Device {
}} }}
} }
pub async fn find_by_refresh_token(refresh_token: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_refresh_token(refresh_token: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
devices::table devices::table
.filter(devices::refresh_token.eq(refresh_token)) .filter(devices::refresh_token.eq(refresh_token))
@ -165,7 +165,7 @@ impl Device {
}} }}
} }
pub async fn find_latest_active_by_user(user_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_latest_active_by_user(user_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
devices::table devices::table
.filter(devices::user_uuid.eq(user_uuid)) .filter(devices::user_uuid.eq(user_uuid))


@ -5,9 +5,9 @@ use super::User;
db_object! { db_object! {
#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "emergency_access"] #[diesel(table_name = emergency_access)]
#[changeset_options(treat_none_as_null="true")] #[diesel(treat_none_as_null = true)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct EmergencyAccess { pub struct EmergencyAccess {
pub uuid: String, pub uuid: String,
pub grantor_uuid: String, pub grantor_uuid: String,
@ -72,7 +72,7 @@ impl EmergencyAccess {
}) })
} }
pub async fn to_json_grantor_details(&self, conn: &DbConn) -> Value { pub async fn to_json_grantor_details(&self, conn: &mut DbConn) -> Value {
let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found."); let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found.");
json!({ json!({
@ -88,7 +88,7 @@ impl EmergencyAccess {
} }
#[allow(clippy::manual_map)] #[allow(clippy::manual_map)]
pub async fn to_json_grantee_details(&self, conn: &DbConn) -> Value { pub async fn to_json_grantee_details(&self, conn: &mut DbConn) -> Value {
let grantee_user = if let Some(grantee_uuid) = self.grantee_uuid.as_deref() { let grantee_user = if let Some(grantee_uuid) = self.grantee_uuid.as_deref() {
Some(User::find_by_uuid(grantee_uuid, conn).await.expect("Grantee user not found.")) Some(User::find_by_uuid(grantee_uuid, conn).await.expect("Grantee user not found."))
} else if let Some(email) = self.email.as_deref() { } else if let Some(email) = self.email.as_deref() {
@ -154,7 +154,7 @@ use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
impl EmergencyAccess { impl EmergencyAccess {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.grantor_uuid, conn).await; User::update_uuid_revision(&self.grantor_uuid, conn).await;
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
@ -189,7 +189,7 @@ impl EmergencyAccess {
} }
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for ea in Self::find_all_by_grantor_uuid(user_uuid, conn).await { for ea in Self::find_all_by_grantor_uuid(user_uuid, conn).await {
ea.delete(conn).await?; ea.delete(conn).await?;
} }
@ -199,7 +199,7 @@ impl EmergencyAccess {
Ok(()) Ok(())
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.grantor_uuid, conn).await; User::update_uuid_revision(&self.grantor_uuid, conn).await;
db_run! { conn: { db_run! { conn: {
@ -209,7 +209,7 @@ impl EmergencyAccess {
}} }}
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
.filter(emergency_access::uuid.eq(uuid)) .filter(emergency_access::uuid.eq(uuid))
@ -222,7 +222,7 @@ impl EmergencyAccess {
grantor_uuid: &str, grantor_uuid: &str,
grantee_uuid: &str, grantee_uuid: &str,
email: &str, email: &str,
conn: &DbConn, conn: &mut DbConn,
) -> Option<Self> { ) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
@ -233,7 +233,7 @@ impl EmergencyAccess {
}} }}
} }
pub async fn find_all_recoveries(conn: &DbConn) -> Vec<Self> { pub async fn find_all_recoveries(conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
.filter(emergency_access::status.eq(EmergencyAccessStatus::RecoveryInitiated as i32)) .filter(emergency_access::status.eq(EmergencyAccessStatus::RecoveryInitiated as i32))
@ -241,7 +241,7 @@ impl EmergencyAccess {
}} }}
} }
pub async fn find_by_uuid_and_grantor_uuid(uuid: &str, grantor_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid_and_grantor_uuid(uuid: &str, grantor_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
.filter(emergency_access::uuid.eq(uuid)) .filter(emergency_access::uuid.eq(uuid))
@ -251,7 +251,7 @@ impl EmergencyAccess {
}} }}
} }
pub async fn find_all_by_grantee_uuid(grantee_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_all_by_grantee_uuid(grantee_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
.filter(emergency_access::grantee_uuid.eq(grantee_uuid)) .filter(emergency_access::grantee_uuid.eq(grantee_uuid))
@ -259,7 +259,7 @@ impl EmergencyAccess {
}} }}
} }
pub async fn find_invited_by_grantee_email(grantee_email: &str, conn: &DbConn) -> Option<Self> { pub async fn find_invited_by_grantee_email(grantee_email: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
.filter(emergency_access::email.eq(grantee_email)) .filter(emergency_access::email.eq(grantee_email))
@ -269,7 +269,7 @@ impl EmergencyAccess {
}} }}
} }
pub async fn find_all_by_grantor_uuid(grantor_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_all_by_grantor_uuid(grantor_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
emergency_access::table emergency_access::table
.filter(emergency_access::grantor_uuid.eq(grantor_uuid)) .filter(emergency_access::grantor_uuid.eq(grantor_uuid))


@ -2,8 +2,8 @@ use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "favorites"] #[diesel(table_name = favorites)]
#[primary_key(user_uuid, cipher_uuid)] #[diesel(primary_key(user_uuid, cipher_uuid))]
pub struct Favorite { pub struct Favorite {
pub user_uuid: String, pub user_uuid: String,
pub cipher_uuid: String, pub cipher_uuid: String,
@ -17,7 +17,7 @@ use crate::error::MapResult;
impl Favorite { impl Favorite {
// Returns whether the specified cipher is a favorite of the specified user. // Returns whether the specified cipher is a favorite of the specified user.
pub async fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &mut DbConn) -> bool {
db_run! { conn: { db_run! { conn: {
let query = favorites::table let query = favorites::table
.filter(favorites::cipher_uuid.eq(cipher_uuid)) .filter(favorites::cipher_uuid.eq(cipher_uuid))
@ -29,7 +29,7 @@ impl Favorite {
} }
// Sets whether the specified cipher is a favorite of the specified user. // Sets whether the specified cipher is a favorite of the specified user.
pub async fn set_favorite(favorite: bool, cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn set_favorite(favorite: bool, cipher_uuid: &str, user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
let (old, new) = (Self::is_favorite(cipher_uuid, user_uuid, conn).await, favorite); let (old, new) = (Self::is_favorite(cipher_uuid, user_uuid, conn).await, favorite);
match (old, new) { match (old, new) {
(false, true) => { (false, true) => {
@ -62,7 +62,7 @@ impl Favorite {
} }
// Delete all favorite entries associated with the specified cipher. // Delete all favorite entries associated with the specified cipher.
pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(favorites::table.filter(favorites::cipher_uuid.eq(cipher_uuid))) diesel::delete(favorites::table.filter(favorites::cipher_uuid.eq(cipher_uuid)))
.execute(conn) .execute(conn)
@ -71,7 +71,7 @@ impl Favorite {
} }
// Delete all favorite entries associated with the specified user. // Delete all favorite entries associated with the specified user.
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(favorites::table.filter(favorites::user_uuid.eq(user_uuid))) diesel::delete(favorites::table.filter(favorites::user_uuid.eq(user_uuid)))
.execute(conn) .execute(conn)
@ -81,7 +81,7 @@ impl Favorite {
/// Return a vec with (cipher_uuid) this will only contain favorite flagged ciphers /// Return a vec with (cipher_uuid) this will only contain favorite flagged ciphers
/// This is used during a full sync so we only need one query for all favorite cipher matches. /// This is used during a full sync so we only need one query for all favorite cipher matches.
pub async fn get_all_cipher_uuid_by_user(user_uuid: &str, conn: &DbConn) -> Vec<String> { pub async fn get_all_cipher_uuid_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<String> {
db_run! { conn: { db_run! { conn: {
favorites::table favorites::table
.filter(favorites::user_uuid.eq(user_uuid)) .filter(favorites::user_uuid.eq(user_uuid))


@ -5,8 +5,8 @@ use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "folders"] #[diesel(table_name = folders)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct Folder { pub struct Folder {
pub uuid: String, pub uuid: String,
pub created_at: NaiveDateTime, pub created_at: NaiveDateTime,
@ -16,8 +16,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "folders_ciphers"] #[diesel(table_name = folders_ciphers)]
#[primary_key(cipher_uuid, folder_uuid)] #[diesel(primary_key(cipher_uuid, folder_uuid))]
pub struct FolderCipher { pub struct FolderCipher {
pub cipher_uuid: String, pub cipher_uuid: String,
pub folder_uuid: String, pub folder_uuid: String,
@ -67,7 +67,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Folder { impl Folder {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn).await; User::update_uuid_revision(&self.user_uuid, conn).await;
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
@ -102,7 +102,7 @@ impl Folder {
} }
} }
pub async fn delete(&self, conn: &DbConn) -> EmptyResult { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn).await; User::update_uuid_revision(&self.user_uuid, conn).await;
FolderCipher::delete_all_by_folder(&self.uuid, conn).await?; FolderCipher::delete_all_by_folder(&self.uuid, conn).await?;
@ -113,14 +113,14 @@ impl Folder {
}} }}
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for folder in Self::find_by_user(user_uuid, conn).await { for folder in Self::find_by_user(user_uuid, conn).await {
folder.delete(conn).await?; folder.delete(conn).await?;
} }
Ok(()) Ok(())
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
folders::table folders::table
.filter(folders::uuid.eq(uuid)) .filter(folders::uuid.eq(uuid))
@ -130,7 +130,7 @@ impl Folder {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
folders::table folders::table
.filter(folders::user_uuid.eq(user_uuid)) .filter(folders::user_uuid.eq(user_uuid))
@ -142,7 +142,7 @@ impl Folder {
} }
impl FolderCipher { impl FolderCipher {
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: db_run! { conn:
sqlite, mysql { sqlite, mysql {
// Not checking for ForeignKey Constraints here. // Not checking for ForeignKey Constraints here.
@ -164,7 +164,7 @@ impl FolderCipher {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete( diesel::delete(
folders_ciphers::table folders_ciphers::table
@ -176,7 +176,7 @@ impl FolderCipher {
}} }}
} }
pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(folders_ciphers::table.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid))) diesel::delete(folders_ciphers::table.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid)))
.execute(conn) .execute(conn)
@ -184,7 +184,7 @@ impl FolderCipher {
}} }}
} }
pub async fn delete_all_by_folder(folder_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_folder(folder_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(folders_ciphers::table.filter(folders_ciphers::folder_uuid.eq(folder_uuid))) diesel::delete(folders_ciphers::table.filter(folders_ciphers::folder_uuid.eq(folder_uuid)))
.execute(conn) .execute(conn)
@ -192,7 +192,7 @@ impl FolderCipher {
}} }}
} }
pub async fn find_by_folder_and_cipher(folder_uuid: &str, cipher_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_folder_and_cipher(folder_uuid: &str, cipher_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
folders_ciphers::table folders_ciphers::table
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .filter(folders_ciphers::folder_uuid.eq(folder_uuid))
@ -203,7 +203,7 @@ impl FolderCipher {
}} }}
} }
pub async fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_folder(folder_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
folders_ciphers::table folders_ciphers::table
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .filter(folders_ciphers::folder_uuid.eq(folder_uuid))
@ -215,7 +215,7 @@ impl FolderCipher {
/// Return a vec with (cipher_uuid, folder_uuid) /// Return a vec with (cipher_uuid, folder_uuid)
/// This is used during a full sync so we only need one query for all folder matches. /// This is used during a full sync so we only need one query for all folder matches.
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<(String, String)> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<(String, String)> {
db_run! { conn: { db_run! { conn: {
folders_ciphers::table folders_ciphers::table
.inner_join(folders::table) .inner_join(folders::table)


@ -3,8 +3,8 @@ use serde_json::Value;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "groups"] #[diesel(table_name = groups)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct Group { pub struct Group {
pub uuid: String, pub uuid: String,
pub organizations_uuid: String, pub organizations_uuid: String,
@ -16,8 +16,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "collections_groups"] #[diesel(table_name = collections_groups)]
#[primary_key(collections_uuid, groups_uuid)] #[diesel(primary_key(collections_uuid, groups_uuid))]
pub struct CollectionGroup { pub struct CollectionGroup {
pub collections_uuid: String, pub collections_uuid: String,
pub groups_uuid: String, pub groups_uuid: String,
@ -26,8 +26,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "groups_users"] #[diesel(table_name = groups_users)]
#[primary_key(groups_uuid, users_organizations_uuid)] #[diesel(primary_key(groups_uuid, users_organizations_uuid))]
pub struct GroupUser { pub struct GroupUser {
pub groups_uuid: String, pub groups_uuid: String,
pub users_organizations_uuid: String pub users_organizations_uuid: String
@ -117,7 +117,7 @@ use super::{User, UserOrganization};
/// Database methods /// Database methods
impl Group { impl Group {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
self.revision_date = Utc::now().naive_utc(); self.revision_date = Utc::now().naive_utc();
db_run! { conn: db_run! { conn:
@ -151,7 +151,7 @@ impl Group {
} }
} }
pub async fn find_by_organization(organizations_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_organization(organizations_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
groups::table groups::table
.filter(groups::organizations_uuid.eq(organizations_uuid)) .filter(groups::organizations_uuid.eq(organizations_uuid))
@ -161,7 +161,7 @@ impl Group {
}} }}
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
groups::table groups::table
.filter(groups::uuid.eq(uuid)) .filter(groups::uuid.eq(uuid))
@ -172,7 +172,7 @@ impl Group {
} }
//Returns all organizations the user has full access to //Returns all organizations the user has full access to
pub async fn gather_user_organizations_full_access(user_uuid: &str, conn: &DbConn) -> Vec<String> { pub async fn gather_user_organizations_full_access(user_uuid: &str, conn: &mut DbConn) -> Vec<String> {
db_run! { conn: { db_run! { conn: {
groups_users::table groups_users::table
.inner_join(users_organizations::table.on( .inner_join(users_organizations::table.on(
@ -190,7 +190,7 @@ impl Group {
}} }}
} }
pub async fn is_in_full_access_group(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> bool { pub async fn is_in_full_access_group(user_uuid: &str, org_uuid: &str, conn: &mut DbConn) -> bool {
db_run! { conn: { db_run! { conn: {
groups::table groups::table
.inner_join(groups_users::table.on( .inner_join(groups_users::table.on(
@ -208,7 +208,7 @@ impl Group {
}} }}
} }
pub async fn delete(&self, conn: &DbConn) -> EmptyResult { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
CollectionGroup::delete_all_by_group(&self.uuid, conn).await?; CollectionGroup::delete_all_by_group(&self.uuid, conn).await?;
GroupUser::delete_all_by_group(&self.uuid, conn).await?; GroupUser::delete_all_by_group(&self.uuid, conn).await?;
@ -219,13 +219,13 @@ impl Group {
}} }}
} }
pub async fn update_revision(uuid: &str, conn: &DbConn) { pub async fn update_revision(uuid: &str, conn: &mut DbConn) {
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await { if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
warn!("Failed to update revision for {}: {:#?}", uuid, e); warn!("Failed to update revision for {}: {:#?}", uuid, e);
} }
} }
async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &DbConn) -> EmptyResult { async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &mut DbConn) -> EmptyResult {
db_run! {conn: { db_run! {conn: {
crate::util::retry(|| { crate::util::retry(|| {
diesel::update(groups::table.filter(groups::uuid.eq(uuid))) diesel::update(groups::table.filter(groups::uuid.eq(uuid)))
@ -238,7 +238,7 @@ impl Group {
} }
impl CollectionGroup { impl CollectionGroup {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
let group_users = GroupUser::find_by_group(&self.groups_uuid, conn).await; let group_users = GroupUser::find_by_group(&self.groups_uuid, conn).await;
for group_user in group_users { for group_user in group_users {
group_user.update_user_revision(conn).await; group_user.update_user_revision(conn).await;
@ -293,7 +293,7 @@ impl CollectionGroup {
} }
} }
pub async fn find_by_group(group_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_group(group_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
collections_groups::table collections_groups::table
.filter(collections_groups::groups_uuid.eq(group_uuid)) .filter(collections_groups::groups_uuid.eq(group_uuid))
@ -303,7 +303,7 @@ impl CollectionGroup {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
collections_groups::table collections_groups::table
.inner_join(groups_users::table.on( .inner_join(groups_users::table.on(
@ -320,7 +320,7 @@ impl CollectionGroup {
}} }}
} }
pub async fn find_by_collection(collection_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_collection(collection_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
collections_groups::table collections_groups::table
.filter(collections_groups::collections_uuid.eq(collection_uuid)) .filter(collections_groups::collections_uuid.eq(collection_uuid))
@ -331,7 +331,7 @@ impl CollectionGroup {
}} }}
} }
pub async fn delete(&self, conn: &DbConn) -> EmptyResult { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
let group_users = GroupUser::find_by_group(&self.groups_uuid, conn).await; let group_users = GroupUser::find_by_group(&self.groups_uuid, conn).await;
for group_user in group_users { for group_user in group_users {
group_user.update_user_revision(conn).await; group_user.update_user_revision(conn).await;
@ -346,7 +346,7 @@ impl CollectionGroup {
}} }}
} }
pub async fn delete_all_by_group(group_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_group(group_uuid: &str, conn: &mut DbConn) -> EmptyResult {
let group_users = GroupUser::find_by_group(group_uuid, conn).await; let group_users = GroupUser::find_by_group(group_uuid, conn).await;
for group_user in group_users { for group_user in group_users {
group_user.update_user_revision(conn).await; group_user.update_user_revision(conn).await;
@ -360,7 +360,7 @@ impl CollectionGroup {
}} }}
} }
pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_collection(collection_uuid: &str, conn: &mut DbConn) -> EmptyResult {
let collection_assigned_to_groups = CollectionGroup::find_by_collection(collection_uuid, conn).await; let collection_assigned_to_groups = CollectionGroup::find_by_collection(collection_uuid, conn).await;
for collection_assigned_to_group in collection_assigned_to_groups { for collection_assigned_to_group in collection_assigned_to_groups {
let group_users = GroupUser::find_by_group(&collection_assigned_to_group.groups_uuid, conn).await; let group_users = GroupUser::find_by_group(&collection_assigned_to_group.groups_uuid, conn).await;
@ -379,7 +379,7 @@ impl CollectionGroup {
} }
impl GroupUser { impl GroupUser {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
self.update_user_revision(conn).await; self.update_user_revision(conn).await;
db_run! { conn: db_run! { conn:
@ -425,7 +425,7 @@ impl GroupUser {
} }
} }
pub async fn find_by_group(group_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_group(group_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
groups_users::table groups_users::table
.filter(groups_users::groups_uuid.eq(group_uuid)) .filter(groups_users::groups_uuid.eq(group_uuid))
@ -435,7 +435,7 @@ impl GroupUser {
}} }}
} }
pub async fn find_by_user(users_organizations_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(users_organizations_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
groups_users::table groups_users::table
.filter(groups_users::users_organizations_uuid.eq(users_organizations_uuid)) .filter(groups_users::users_organizations_uuid.eq(users_organizations_uuid))
@ -445,7 +445,7 @@ impl GroupUser {
}} }}
} }
pub async fn update_user_revision(&self, conn: &DbConn) { pub async fn update_user_revision(&self, conn: &mut DbConn) {
match UserOrganization::find_by_uuid(&self.users_organizations_uuid, conn).await { match UserOrganization::find_by_uuid(&self.users_organizations_uuid, conn).await {
Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await, Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await,
None => warn!("User could not be found!"), None => warn!("User could not be found!"),
@ -455,7 +455,7 @@ impl GroupUser {
pub async fn delete_by_group_id_and_user_id( pub async fn delete_by_group_id_and_user_id(
group_uuid: &str, group_uuid: &str,
users_organizations_uuid: &str, users_organizations_uuid: &str,
conn: &DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
match UserOrganization::find_by_uuid(users_organizations_uuid, conn).await { match UserOrganization::find_by_uuid(users_organizations_uuid, conn).await {
Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await, Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await,
@ -471,7 +471,7 @@ impl GroupUser {
}} }}
} }
pub async fn delete_all_by_group(group_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_group(group_uuid: &str, conn: &mut DbConn) -> EmptyResult {
let group_users = GroupUser::find_by_group(group_uuid, conn).await; let group_users = GroupUser::find_by_group(group_uuid, conn).await;
for group_user in group_users { for group_user in group_users {
group_user.update_user_revision(conn).await; group_user.update_user_revision(conn).await;
@ -485,7 +485,7 @@ impl GroupUser {
}} }}
} }
pub async fn delete_all_by_user(users_organizations_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(users_organizations_uuid: &str, conn: &mut DbConn) -> EmptyResult {
match UserOrganization::find_by_uuid(users_organizations_uuid, conn).await { match UserOrganization::find_by_uuid(users_organizations_uuid, conn).await {
Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await, Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await,
None => warn!("User could not be found!"), None => warn!("User could not be found!"),


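The save method in the next section keeps the project's existing backend split, which the diesel 2 port leaves intact apart from the &mut connection: SQLite and MySQL go through diesel::replace_into, while PostgreSQL would use an INSERT ... ON CONFLICT upsert. A hedged sketch of that split with a hypothetical OrgPolicyRow type (both backend features enabled; this illustrates the two diesel APIs, not the project's exact code):

    use diesel::prelude::*;

    diesel::table! {
        org_policies (uuid) {
            uuid -> Text,
            org_uuid -> Text,
            enabled -> Bool,
        }
    }

    #[derive(Insertable, AsChangeset)]
    #[diesel(table_name = org_policies)]
    #[diesel(primary_key(uuid))]
    struct OrgPolicyRow {
        uuid: String,
        org_uuid: String,
        enabled: bool,
    }

    // SQLite/MySQL: REPLACE INTO overwrites any row with the same primary key.
    fn save_sqlite(conn: &mut SqliteConnection, row: &OrgPolicyRow) -> QueryResult<usize> {
        diesel::replace_into(org_policies::table).values(row).execute(conn)
    }

    // PostgreSQL: INSERT ... ON CONFLICT (uuid) DO UPDATE SET ...; the AsChangeset
    // derive skips the declared primary key when applying the update.
    fn save_postgres(conn: &mut PgConnection, row: &OrgPolicyRow) -> QueryResult<usize> {
        diesel::insert_into(org_policies::table)
            .values(row)
            .on_conflict(org_policies::uuid)
            .do_update()
            .set(row)
            .execute(conn)
    }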
@ -10,8 +10,8 @@ use super::{TwoFactor, UserOrgStatus, UserOrgType, UserOrganization};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "org_policies"] #[diesel(table_name = org_policies)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct OrgPolicy { pub struct OrgPolicy {
pub uuid: String, pub uuid: String,
pub org_uuid: String, pub org_uuid: String,
@ -83,7 +83,7 @@ impl OrgPolicy {
/// Database methods /// Database methods
impl OrgPolicy { impl OrgPolicy {
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: db_run! { conn:
sqlite, mysql { sqlite, mysql {
match diesel::replace_into(org_policies::table) match diesel::replace_into(org_policies::table)
@ -126,7 +126,7 @@ impl OrgPolicy {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(org_policies::table.filter(org_policies::uuid.eq(self.uuid))) diesel::delete(org_policies::table.filter(org_policies::uuid.eq(self.uuid)))
.execute(conn) .execute(conn)
@ -134,7 +134,7 @@ impl OrgPolicy {
}} }}
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
org_policies::table org_policies::table
.filter(org_policies::uuid.eq(uuid)) .filter(org_policies::uuid.eq(uuid))
@ -144,7 +144,7 @@ impl OrgPolicy {
}} }}
} }
pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_org(org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
org_policies::table org_policies::table
.filter(org_policies::org_uuid.eq(org_uuid)) .filter(org_policies::org_uuid.eq(org_uuid))
@ -154,7 +154,7 @@ impl OrgPolicy {
}} }}
} }
pub async fn find_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_confirmed_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
org_policies::table org_policies::table
.inner_join( .inner_join(
@ -172,7 +172,7 @@ impl OrgPolicy {
}} }}
} }
pub async fn find_by_org_and_type(org_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Option<Self> { pub async fn find_by_org_and_type(org_uuid: &str, policy_type: OrgPolicyType, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
org_policies::table org_policies::table
.filter(org_policies::org_uuid.eq(org_uuid)) .filter(org_policies::org_uuid.eq(org_uuid))
@ -183,7 +183,7 @@ impl OrgPolicy {
}} }}
} }
pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_organization(org_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(org_policies::table.filter(org_policies::org_uuid.eq(org_uuid))) diesel::delete(org_policies::table.filter(org_policies::org_uuid.eq(org_uuid)))
.execute(conn) .execute(conn)
@ -194,7 +194,7 @@ impl OrgPolicy {
pub async fn find_accepted_and_confirmed_by_user_and_active_policy( pub async fn find_accepted_and_confirmed_by_user_and_active_policy(
user_uuid: &str, user_uuid: &str,
policy_type: OrgPolicyType, policy_type: OrgPolicyType,
conn: &DbConn, conn: &mut DbConn,
) -> Vec<Self> { ) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
org_policies::table org_policies::table
@ -221,7 +221,7 @@ impl OrgPolicy {
pub async fn find_confirmed_by_user_and_active_policy( pub async fn find_confirmed_by_user_and_active_policy(
user_uuid: &str, user_uuid: &str,
policy_type: OrgPolicyType, policy_type: OrgPolicyType,
conn: &DbConn, conn: &mut DbConn,
) -> Vec<Self> { ) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
org_policies::table org_policies::table
@ -249,7 +249,7 @@ impl OrgPolicy {
user_uuid: &str, user_uuid: &str,
policy_type: OrgPolicyType, policy_type: OrgPolicyType,
exclude_org_uuid: Option<&str>, exclude_org_uuid: Option<&str>,
conn: &DbConn, conn: &mut DbConn,
) -> bool { ) -> bool {
for policy in for policy in
OrgPolicy::find_accepted_and_confirmed_by_user_and_active_policy(user_uuid, policy_type, conn).await OrgPolicy::find_accepted_and_confirmed_by_user_and_active_policy(user_uuid, policy_type, conn).await
@ -272,7 +272,7 @@ impl OrgPolicy {
user_uuid: &str, user_uuid: &str,
org_uuid: &str, org_uuid: &str,
exclude_current_org: bool, exclude_current_org: bool,
conn: &DbConn, conn: &mut DbConn,
) -> OrgPolicyResult { ) -> OrgPolicyResult {
// Enforce TwoFactor/TwoStep login // Enforce TwoFactor/TwoStep login
if TwoFactor::find_by_user(user_uuid, conn).await.is_empty() { if TwoFactor::find_by_user(user_uuid, conn).await.is_empty() {
@ -300,7 +300,7 @@ impl OrgPolicy {
/// Returns true if the user belongs to an org that has enabled the `DisableHideEmail` /// Returns true if the user belongs to an org that has enabled the `DisableHideEmail`
/// option of the `Send Options` policy, and the user is not an owner or admin of that org. /// option of the `Send Options` policy, and the user is not an owner or admin of that org.
pub async fn is_hide_email_disabled(user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_hide_email_disabled(user_uuid: &str, conn: &mut DbConn) -> bool {
for policy in for policy in
OrgPolicy::find_confirmed_by_user_and_active_policy(user_uuid, OrgPolicyType::SendOptions, conn).await OrgPolicy::find_confirmed_by_user_and_active_policy(user_uuid, OrgPolicyType::SendOptions, conn).await
{ {
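
The attribute rewrite at the top of this file (`#[table_name = ...]` / `#[primary_key(...)]` becoming `#[diesel(table_name = ...)]` / `#[diesel(primary_key(...))]`) is the diesel 2 derive syntax: all derive options now live under a single `#[diesel(...)]` attribute, and table names are paths rather than strings. A small sketch of the new form, with an illustrative two-column table that is not taken from this commit:

use diesel::prelude::*;

diesel::table! {
    org_policies (uuid) {
        uuid -> Text,
        org_uuid -> Text,
    }
}

// diesel 1.x:
//   #[table_name = "org_policies"]
//   #[primary_key(uuid)]
// diesel 2.x, as used in the hunks above:
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[diesel(table_name = org_policies)]
#[diesel(primary_key(uuid))]
struct OrgPolicy {
    uuid: String,
    org_uuid: String,
}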

View File

@ -6,8 +6,8 @@ use super::{CollectionUser, GroupUser, OrgPolicy, OrgPolicyType, User};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "organizations"] #[diesel(table_name = organizations)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct Organization { pub struct Organization {
pub uuid: String, pub uuid: String,
pub name: String, pub name: String,
@ -17,8 +17,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "users_organizations"] #[diesel(table_name = users_organizations)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct UserOrganization { pub struct UserOrganization {
pub uuid: String, pub uuid: String,
pub user_uuid: String, pub user_uuid: String,
@ -216,7 +216,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Organization { impl Organization {
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
for user_org in UserOrganization::find_by_org(&self.uuid, conn).await.iter() { for user_org in UserOrganization::find_by_org(&self.uuid, conn).await.iter() {
User::update_uuid_revision(&user_org.user_uuid, conn).await; User::update_uuid_revision(&user_org.user_uuid, conn).await;
} }
@ -253,7 +253,7 @@ impl Organization {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
use super::{Cipher, Collection}; use super::{Cipher, Collection};
Cipher::delete_all_by_organization(&self.uuid, conn).await?; Cipher::delete_all_by_organization(&self.uuid, conn).await?;
@ -268,7 +268,7 @@ impl Organization {
}} }}
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
organizations::table organizations::table
.filter(organizations::uuid.eq(uuid)) .filter(organizations::uuid.eq(uuid))
@ -277,7 +277,7 @@ impl Organization {
}} }}
} }
pub async fn get_all(conn: &DbConn) -> Vec<Self> { pub async fn get_all(conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
organizations::table.load::<OrganizationDb>(conn).expect("Error loading organizations").from_db() organizations::table.load::<OrganizationDb>(conn).expect("Error loading organizations").from_db()
}} }}
@ -285,7 +285,7 @@ impl Organization {
} }
impl UserOrganization { impl UserOrganization {
pub async fn to_json(&self, conn: &DbConn) -> Value { pub async fn to_json(&self, conn: &mut DbConn) -> Value {
let org = Organization::find_by_uuid(&self.org_uuid, conn).await.unwrap(); let org = Organization::find_by_uuid(&self.org_uuid, conn).await.unwrap();
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs // https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs
@ -350,7 +350,7 @@ impl UserOrganization {
}) })
} }
pub async fn to_json_user_details(&self, conn: &DbConn) -> Value { pub async fn to_json_user_details(&self, conn: &mut DbConn) -> Value {
let user = User::find_by_uuid(&self.user_uuid, conn).await.unwrap(); let user = User::find_by_uuid(&self.user_uuid, conn).await.unwrap();
// Because BitWarden wants the status to be -1 for revoked users we need to catch that here. // Because BitWarden wants the status to be -1 for revoked users we need to catch that here.
@ -383,7 +383,7 @@ impl UserOrganization {
}) })
} }
pub async fn to_json_details(&self, conn: &DbConn) -> Value { pub async fn to_json_details(&self, conn: &mut DbConn) -> Value {
let coll_uuids = if self.access_all { let coll_uuids = if self.access_all {
vec![] // If we have complete access, no need to fill the array vec![] // If we have complete access, no need to fill the array
} else { } else {
@ -421,7 +421,7 @@ impl UserOrganization {
"Object": "organizationUserDetails", "Object": "organizationUserDetails",
}) })
} }
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn).await; User::update_uuid_revision(&self.user_uuid, conn).await;
db_run! { conn: db_run! { conn:
@ -455,7 +455,7 @@ impl UserOrganization {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn).await; User::update_uuid_revision(&self.user_uuid, conn).await;
CollectionUser::delete_all_by_user_and_org(&self.user_uuid, &self.org_uuid, conn).await?; CollectionUser::delete_all_by_user_and_org(&self.user_uuid, &self.org_uuid, conn).await?;
@ -468,21 +468,21 @@ impl UserOrganization {
}} }}
} }
pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_organization(org_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for user_org in Self::find_by_org(org_uuid, conn).await { for user_org in Self::find_by_org(org_uuid, conn).await {
user_org.delete(conn).await?; user_org.delete(conn).await?;
} }
Ok(()) Ok(())
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for user_org in Self::find_any_state_by_user(user_uuid, conn).await { for user_org in Self::find_any_state_by_user(user_uuid, conn).await {
user_org.delete(conn).await?; user_org.delete(conn).await?;
} }
Ok(()) Ok(())
} }
pub async fn find_by_email_and_org(email: &str, org_id: &str, conn: &DbConn) -> Option<UserOrganization> { pub async fn find_by_email_and_org(email: &str, org_id: &str, conn: &mut DbConn) -> Option<UserOrganization> {
if let Some(user) = super::User::find_by_mail(email, conn).await { if let Some(user) = super::User::find_by_mail(email, conn).await {
if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, conn).await { if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, conn).await {
return Some(user_org); return Some(user_org);
@ -504,7 +504,7 @@ impl UserOrganization {
(self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed) (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed)
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::uuid.eq(uuid)) .filter(users_organizations::uuid.eq(uuid))
@ -513,7 +513,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::uuid.eq(uuid)) .filter(users_organizations::uuid.eq(uuid))
@ -523,7 +523,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_confirmed_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
@ -533,7 +533,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_invited_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_invited_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
@ -543,7 +543,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_any_state_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_any_state_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
@ -552,7 +552,7 @@ impl UserOrganization {
}} }}
} }
pub async fn count_accepted_and_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> i64 { pub async fn count_accepted_and_confirmed_by_user(user_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
@ -564,7 +564,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_org(org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
@ -573,7 +573,7 @@ impl UserOrganization {
}} }}
} }
pub async fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { pub async fn count_by_org(org_uuid: &str, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
@ -584,7 +584,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_org_and_type(org_uuid: &str, atype: UserOrgType, conn: &DbConn) -> Vec<Self> { pub async fn find_by_org_and_type(org_uuid: &str, atype: UserOrgType, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
@ -594,7 +594,7 @@ impl UserOrganization {
}} }}
} }
pub async fn count_confirmed_by_org_and_type(org_uuid: &str, atype: UserOrgType, conn: &DbConn) -> i64 { pub async fn count_confirmed_by_org_and_type(org_uuid: &str, atype: UserOrgType, conn: &mut DbConn) -> i64 {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
@ -606,7 +606,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
@ -616,7 +616,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
@ -625,7 +625,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.inner_join( .inner_join(
@ -644,7 +644,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
@ -666,7 +666,7 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_collection_and_org(collection_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_collection_and_org(collection_uuid: &str, org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))

View File

@ -5,9 +5,9 @@ use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "sends"] #[diesel(table_name = sends)]
#[changeset_options(treat_none_as_null="true")] #[diesel(treat_none_as_null = true)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct Send { pub struct Send {
pub uuid: String, pub uuid: String,
@ -101,7 +101,7 @@ impl Send {
} }
} }
pub async fn creator_identifier(&self, conn: &DbConn) -> Option<String> { pub async fn creator_identifier(&self, conn: &mut DbConn) -> Option<String> {
if let Some(hide_email) = self.hide_email { if let Some(hide_email) = self.hide_email {
if hide_email { if hide_email {
return None; return None;
@ -148,7 +148,7 @@ impl Send {
}) })
} }
pub async fn to_json_access(&self, conn: &DbConn) -> Value { pub async fn to_json_access(&self, conn: &mut DbConn) -> Value {
use crate::util::format_date; use crate::util::format_date;
let data: Value = serde_json::from_str(&self.data).unwrap_or_default(); let data: Value = serde_json::from_str(&self.data).unwrap_or_default();
@ -174,7 +174,7 @@ use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
impl Send { impl Send {
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
self.revision_date = Utc::now().naive_utc(); self.revision_date = Utc::now().naive_utc();
@ -209,7 +209,7 @@ impl Send {
} }
} }
pub async fn delete(&self, conn: &DbConn) -> EmptyResult { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
if self.atype == SendType::File as i32 { if self.atype == SendType::File as i32 {
@ -224,13 +224,13 @@ impl Send {
} }
/// Purge all sends that are past their deletion date. /// Purge all sends that are past their deletion date.
pub async fn purge(conn: &DbConn) { pub async fn purge(conn: &mut DbConn) {
for send in Self::find_by_past_deletion_date(conn).await { for send in Self::find_by_past_deletion_date(conn).await {
send.delete(conn).await.ok(); send.delete(conn).await.ok();
} }
} }
pub async fn update_users_revision(&self, conn: &DbConn) -> Vec<String> { pub async fn update_users_revision(&self, conn: &mut DbConn) -> Vec<String> {
let mut user_uuids = Vec::new(); let mut user_uuids = Vec::new();
match &self.user_uuid { match &self.user_uuid {
Some(user_uuid) => { Some(user_uuid) => {
@ -244,14 +244,14 @@ impl Send {
user_uuids user_uuids
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
for send in Self::find_by_user(user_uuid, conn).await { for send in Self::find_by_user(user_uuid, conn).await {
send.delete(conn).await?; send.delete(conn).await?;
} }
Ok(()) Ok(())
} }
pub async fn find_by_access_id(access_id: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_access_id(access_id: &str, conn: &mut DbConn) -> Option<Self> {
use data_encoding::BASE64URL_NOPAD; use data_encoding::BASE64URL_NOPAD;
use uuid::Uuid; use uuid::Uuid;
@ -268,7 +268,7 @@ impl Send {
Self::find_by_uuid(&uuid, conn).await Self::find_by_uuid(&uuid, conn).await
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! {conn: { db_run! {conn: {
sends::table sends::table
.filter(sends::uuid.eq(uuid)) .filter(sends::uuid.eq(uuid))
@ -278,7 +278,7 @@ impl Send {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
sends::table sends::table
.filter(sends::user_uuid.eq(user_uuid)) .filter(sends::user_uuid.eq(user_uuid))
@ -286,7 +286,7 @@ impl Send {
}} }}
} }
pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_org(org_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
sends::table sends::table
.filter(sends::organization_uuid.eq(org_uuid)) .filter(sends::organization_uuid.eq(org_uuid))
@ -294,7 +294,7 @@ impl Send {
}} }}
} }
pub async fn find_by_past_deletion_date(conn: &DbConn) -> Vec<Self> { pub async fn find_by_past_deletion_date(conn: &mut DbConn) -> Vec<Self> {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
db_run! {conn: { db_run! {conn: {
sends::table sends::table
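
Send (and User, further below) also folds `#[changeset_options(treat_none_as_null="true")]` into the same `#[diesel(...)]` attribute, and the value becomes a real boolean rather than a quoted string. A brief sketch under the assumption of a simplified `sends` table, not copied from this commit:

use diesel::prelude::*;

diesel::table! {
    sends (uuid) {
        uuid -> Text,
        notes -> Nullable<Text>,
    }
}

// With treat_none_as_null, an AsChangeset update writes NULL for `notes: None`
// instead of skipping the column; diesel 2 spells the option as shown here.
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[diesel(table_name = sends)]
#[diesel(treat_none_as_null = true)]
#[diesel(primary_key(uuid))]
struct Send {
    uuid: String,
    notes: Option<String>,
}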

View File

@ -4,8 +4,8 @@ use crate::{api::EmptyResult, db::DbConn, error::MapResult};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "twofactor"] #[diesel(table_name = twofactor)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct TwoFactor { pub struct TwoFactor {
pub uuid: String, pub uuid: String,
pub user_uuid: String, pub user_uuid: String,
@ -68,7 +68,7 @@ impl TwoFactor {
/// Database methods /// Database methods
impl TwoFactor { impl TwoFactor {
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: db_run! { conn:
sqlite, mysql { sqlite, mysql {
match diesel::replace_into(twofactor::table) match diesel::replace_into(twofactor::table)
@ -107,7 +107,7 @@ impl TwoFactor {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(twofactor::table.filter(twofactor::uuid.eq(self.uuid))) diesel::delete(twofactor::table.filter(twofactor::uuid.eq(self.uuid)))
.execute(conn) .execute(conn)
@ -115,7 +115,7 @@ impl TwoFactor {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
twofactor::table twofactor::table
.filter(twofactor::user_uuid.eq(user_uuid)) .filter(twofactor::user_uuid.eq(user_uuid))
@ -126,7 +126,7 @@ impl TwoFactor {
}} }}
} }
pub async fn find_by_user_and_type(user_uuid: &str, atype: i32, conn: &DbConn) -> Option<Self> { pub async fn find_by_user_and_type(user_uuid: &str, atype: i32, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
twofactor::table twofactor::table
.filter(twofactor::user_uuid.eq(user_uuid)) .filter(twofactor::user_uuid.eq(user_uuid))
@ -137,7 +137,7 @@ impl TwoFactor {
}} }}
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid))) diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid)))
.execute(conn) .execute(conn)
@ -145,7 +145,7 @@ impl TwoFactor {
}} }}
} }
pub async fn migrate_u2f_to_webauthn(conn: &DbConn) -> EmptyResult { pub async fn migrate_u2f_to_webauthn(conn: &mut DbConn) -> EmptyResult {
let u2f_factors = db_run! { conn: { let u2f_factors = db_run! { conn: {
twofactor::table twofactor::table
.filter(twofactor::atype.eq(TwoFactorType::U2f as i32)) .filter(twofactor::atype.eq(TwoFactorType::U2f as i32))

View File

@ -4,8 +4,8 @@ use crate::{api::EmptyResult, auth::ClientIp, db::DbConn, error::MapResult, CONF
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "twofactor_incomplete"] #[diesel(table_name = twofactor_incomplete)]
#[primary_key(user_uuid, device_uuid)] #[diesel(primary_key(user_uuid, device_uuid))]
pub struct TwoFactorIncomplete { pub struct TwoFactorIncomplete {
pub user_uuid: String, pub user_uuid: String,
// This device UUID is simply what's claimed by the device. It doesn't // This device UUID is simply what's claimed by the device. It doesn't
@ -24,7 +24,7 @@ impl TwoFactorIncomplete {
device_uuid: &str, device_uuid: &str,
device_name: &str, device_name: &str,
ip: &ClientIp, ip: &ClientIp,
conn: &DbConn, conn: &mut DbConn,
) -> EmptyResult { ) -> EmptyResult {
if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() { if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() {
return Ok(()); return Ok(());
@ -52,7 +52,7 @@ impl TwoFactorIncomplete {
}} }}
} }
pub async fn mark_complete(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn mark_complete(user_uuid: &str, device_uuid: &str, conn: &mut DbConn) -> EmptyResult {
if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() { if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() {
return Ok(()); return Ok(());
} }
@ -60,7 +60,7 @@ impl TwoFactorIncomplete {
Self::delete_by_user_and_device(user_uuid, device_uuid, conn).await Self::delete_by_user_and_device(user_uuid, device_uuid, conn).await
} }
pub async fn find_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! { conn: { db_run! { conn: {
twofactor_incomplete::table twofactor_incomplete::table
.filter(twofactor_incomplete::user_uuid.eq(user_uuid)) .filter(twofactor_incomplete::user_uuid.eq(user_uuid))
@ -71,7 +71,7 @@ impl TwoFactorIncomplete {
}} }}
} }
pub async fn find_logins_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec<Self> { pub async fn find_logins_before(dt: &NaiveDateTime, conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
twofactor_incomplete::table twofactor_incomplete::table
.filter(twofactor_incomplete::login_time.lt(dt)) .filter(twofactor_incomplete::login_time.lt(dt))
@ -81,11 +81,11 @@ impl TwoFactorIncomplete {
}} }}
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
Self::delete_by_user_and_device(&self.user_uuid, &self.device_uuid, conn).await Self::delete_by_user_and_device(&self.user_uuid, &self.device_uuid, conn).await
} }
pub async fn delete_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(twofactor_incomplete::table diesel::delete(twofactor_incomplete::table
.filter(twofactor_incomplete::user_uuid.eq(user_uuid)) .filter(twofactor_incomplete::user_uuid.eq(user_uuid))
@ -95,7 +95,7 @@ impl TwoFactorIncomplete {
}} }}
} }
pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_user(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
db_run! { conn: { db_run! { conn: {
diesel::delete(twofactor_incomplete::table.filter(twofactor_incomplete::user_uuid.eq(user_uuid))) diesel::delete(twofactor_incomplete::table.filter(twofactor_incomplete::user_uuid.eq(user_uuid)))
.execute(conn) .execute(conn)

View File

@ -6,9 +6,9 @@ use crate::CONFIG;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "users"] #[diesel(table_name = users)]
#[changeset_options(treat_none_as_null="true")] #[diesel(treat_none_as_null = true)]
#[primary_key(uuid)] #[diesel(primary_key(uuid))]
pub struct User { pub struct User {
pub uuid: String, pub uuid: String,
pub enabled: bool, pub enabled: bool,
@ -32,7 +32,7 @@ db_object! {
pub private_key: Option<String>, pub private_key: Option<String>,
pub public_key: Option<String>, pub public_key: Option<String>,
#[column_name = "totp_secret"] // Note, this is only added to the UserDb structs, not to User #[diesel(column_name = "totp_secret")] // Note, this is only added to the UserDb structs, not to User
_totp_secret: Option<String>, _totp_secret: Option<String>,
pub totp_recover: Option<String>, pub totp_recover: Option<String>,
@ -49,8 +49,8 @@ db_object! {
} }
#[derive(Identifiable, Queryable, Insertable)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "invitations"] #[diesel(table_name = invitations)]
#[primary_key(email)] #[diesel(primary_key(email))]
pub struct Invitation { pub struct Invitation {
pub email: String, pub email: String,
} }
@ -192,18 +192,13 @@ use crate::db::DbConn;
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
use futures::{stream, stream::StreamExt};
/// Database methods /// Database methods
impl User { impl User {
pub async fn to_json(&self, conn: &DbConn) -> Value { pub async fn to_json(&self, conn: &mut DbConn) -> Value {
let orgs_json = stream::iter(UserOrganization::find_confirmed_by_user(&self.uuid, conn).await) let mut orgs_json = Vec::new();
.then(|c| async { for c in UserOrganization::find_confirmed_by_user(&self.uuid, conn).await {
let c = c; // Move out this single variable orgs_json.push(c.to_json(conn).await);
c.to_json(conn).await }
})
.collect::<Vec<Value>>()
.await;
let twofactor_enabled = !TwoFactor::find_by_user(&self.uuid, conn).await.is_empty(); let twofactor_enabled = !TwoFactor::find_by_user(&self.uuid, conn).await.is_empty();
@ -235,7 +230,7 @@ impl User {
}) })
} }
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
if self.email.trim().is_empty() { if self.email.trim().is_empty() {
err!("User email can't be empty") err!("User email can't be empty")
} }
@ -273,7 +268,7 @@ impl User {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
for user_org in UserOrganization::find_confirmed_by_user(&self.uuid, conn).await { for user_org in UserOrganization::find_confirmed_by_user(&self.uuid, conn).await {
if user_org.atype == UserOrgType::Owner if user_org.atype == UserOrgType::Owner
&& UserOrganization::count_confirmed_by_org_and_type(&user_org.org_uuid, UserOrgType::Owner, conn).await && UserOrganization::count_confirmed_by_org_and_type(&user_org.org_uuid, UserOrgType::Owner, conn).await
@ -301,13 +296,13 @@ impl User {
}} }}
} }
pub async fn update_uuid_revision(uuid: &str, conn: &DbConn) { pub async fn update_uuid_revision(uuid: &str, conn: &mut DbConn) {
if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await { if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await {
warn!("Failed to update revision for {}: {:#?}", uuid, e); warn!("Failed to update revision for {}: {:#?}", uuid, e);
} }
} }
pub async fn update_all_revisions(conn: &DbConn) -> EmptyResult { pub async fn update_all_revisions(conn: &mut DbConn) -> EmptyResult {
let updated_at = Utc::now().naive_utc(); let updated_at = Utc::now().naive_utc();
db_run! {conn: { db_run! {conn: {
@ -320,13 +315,13 @@ impl User {
}} }}
} }
pub async fn update_revision(&mut self, conn: &DbConn) -> EmptyResult { pub async fn update_revision(&mut self, conn: &mut DbConn) -> EmptyResult {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
Self::_update_revision(&self.uuid, &self.updated_at, conn).await Self::_update_revision(&self.uuid, &self.updated_at, conn).await
} }
async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &DbConn) -> EmptyResult { async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &mut DbConn) -> EmptyResult {
db_run! {conn: { db_run! {conn: {
crate::util::retry(|| { crate::util::retry(|| {
diesel::update(users::table.filter(users::uuid.eq(uuid))) diesel::update(users::table.filter(users::uuid.eq(uuid)))
@ -337,7 +332,7 @@ impl User {
}} }}
} }
pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_mail(mail: &str, conn: &mut DbConn) -> Option<Self> {
let lower_mail = mail.to_lowercase(); let lower_mail = mail.to_lowercase();
db_run! {conn: { db_run! {conn: {
users::table users::table
@ -348,19 +343,19 @@ impl User {
}} }}
} }
pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
db_run! {conn: { db_run! {conn: {
users::table.filter(users::uuid.eq(uuid)).first::<UserDb>(conn).ok().from_db() users::table.filter(users::uuid.eq(uuid)).first::<UserDb>(conn).ok().from_db()
}} }}
} }
pub async fn get_all(conn: &DbConn) -> Vec<Self> { pub async fn get_all(conn: &mut DbConn) -> Vec<Self> {
db_run! {conn: { db_run! {conn: {
users::table.load::<UserDb>(conn).expect("Error loading users").from_db() users::table.load::<UserDb>(conn).expect("Error loading users").from_db()
}} }}
} }
pub async fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> { pub async fn last_active(&self, conn: &mut DbConn) -> Option<NaiveDateTime> {
match Device::find_latest_active_by_user(&self.uuid, conn).await { match Device::find_latest_active_by_user(&self.uuid, conn).await {
Some(device) => Some(device.updated_at), Some(device) => Some(device.updated_at),
None => None, None => None,
@ -376,7 +371,7 @@ impl Invitation {
} }
} }
pub async fn save(&self, conn: &DbConn) -> EmptyResult { pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
if self.email.trim().is_empty() { if self.email.trim().is_empty() {
err!("Invitation email can't be empty") err!("Invitation email can't be empty")
} }
@ -401,7 +396,7 @@ impl Invitation {
} }
} }
pub async fn delete(self, conn: &DbConn) -> EmptyResult { pub async fn delete(self, conn: &mut DbConn) -> EmptyResult {
db_run! {conn: { db_run! {conn: {
diesel::delete(invitations::table.filter(invitations::email.eq(self.email))) diesel::delete(invitations::table.filter(invitations::email.eq(self.email)))
.execute(conn) .execute(conn)
@ -409,7 +404,7 @@ impl Invitation {
}} }}
} }
pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> { pub async fn find_by_mail(mail: &str, conn: &mut DbConn) -> Option<Self> {
let lower_mail = mail.to_lowercase(); let lower_mail = mail.to_lowercase();
db_run! {conn: { db_run! {conn: {
invitations::table invitations::table
@ -420,7 +415,7 @@ impl Invitation {
}} }}
} }
pub async fn take(mail: &str, conn: &DbConn) -> bool { pub async fn take(mail: &str, conn: &mut DbConn) -> bool {
match Self::find_by_mail(mail, conn).await { match Self::find_by_mail(mail, conn).await {
Some(invitation) => invitation.delete(conn).await.is_ok(), Some(invitation) => invitation.delete(conn).await.is_ok(),
None => false, None => false,
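
The User::to_json hunk in this file replaces the futures `stream::iter(..).then(..)` pipeline with a plain loop. With diesel 2 the connection is `&mut DbConn`, and the `then` closure would have to lend that single mutable borrow into each future it returns, which the borrow checker does not accept for an ordinary closure; awaiting each call sequentially sidesteps the problem. A self-contained sketch of the pattern, with stand-in `DbConn` and `Item` types that are only illustrative:

use serde_json::{json, Value};

// Stand-ins for the real vaultwarden types.
struct DbConn;
struct Item {
    name: String,
}

impl Item {
    async fn to_json(&self, _conn: &mut DbConn) -> Value {
        json!({ "Name": self.name })
    }
}

// Awaiting sequentially keeps exactly one `&mut DbConn` borrow live at a time,
// which is why the stream-combinator version had to go.
async fn collect_json(items: Vec<Item>, conn: &mut DbConn) -> Vec<Value> {
    let mut out = Vec::with_capacity(items.len());
    for item in items {
        out.push(item.to_json(conn).await);
    }
    out
}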

View File

@ -36,7 +36,6 @@ macro_rules! make_error {
use diesel::r2d2::PoolError as R2d2Err; use diesel::r2d2::PoolError as R2d2Err;
use diesel::result::Error as DieselErr; use diesel::result::Error as DieselErr;
use diesel::ConnectionError as DieselConErr; use diesel::ConnectionError as DieselConErr;
use diesel_migrations::RunMigrationsError as DieselMigErr;
use handlebars::RenderError as HbErr; use handlebars::RenderError as HbErr;
use jsonwebtoken::errors::Error as JwtErr; use jsonwebtoken::errors::Error as JwtErr;
use lettre::address::AddressError as AddrErr; use lettre::address::AddressError as AddrErr;
@ -87,7 +86,6 @@ make_error! {
Rocket(RocketErr): _has_source, _api_error, Rocket(RocketErr): _has_source, _api_error,
DieselCon(DieselConErr): _has_source, _api_error, DieselCon(DieselConErr): _has_source, _api_error,
DieselMig(DieselMigErr): _has_source, _api_error,
Webauthn(WebauthnErr): _has_source, _api_error, Webauthn(WebauthnErr): _has_source, _api_error,
WebSocket(TungstError): _has_source, _api_error, WebSocket(TungstError): _has_source, _api_error,
} }
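
`RunMigrationsError` is dropped here because diesel_migrations 2.0 no longer exposes that type; its `MigrationHarness` API reports failures as a boxed error instead, so there is nothing left for `make_error!` to wrap. A rough sketch of running embedded migrations under diesel 2, where the migrations path and the string error mapping are assumptions rather than code from this commit:

use diesel::sqlite::SqliteConnection;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

// Path is illustrative; it must point at a migrations directory at build time.
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite");

// run_pending_migrations returns Result<Vec<MigrationVersion>, Box<dyn Error + Send + Sync>>,
// so callers map or box the error themselves instead of matching on RunMigrationsError.
fn run_migrations(conn: &mut SqliteConnection) -> Result<(), String> {
    conn.run_pending_migrations(MIGRATIONS).map_err(|e| e.to_string())?;
    Ok(())
}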

View File

@ -30,7 +30,7 @@
// The more key/value pairs there are the more recursion occurs. // The more key/value pairs there are the more recursion occurs.
// We want to keep this as low as possible, but not higher than 128. // We want to keep this as low as possible, but not higher than 128.
// If you go above 128 it will cause rust-analyzer to fail, // If you go above 128 it will cause rust-analyzer to fail,
#![recursion_limit = "87"] #![recursion_limit = "94"]
// When enabled use MiMalloc as malloc instead of the default malloc // When enabled use MiMalloc as malloc instead of the default malloc
#[cfg(feature = "enable_mimalloc")] #[cfg(feature = "enable_mimalloc")]
@ -108,7 +108,7 @@ async fn main() -> Result<(), Error> {
let pool = create_db_pool().await; let pool = create_db_pool().await;
schedule_jobs(pool.clone()).await; schedule_jobs(pool.clone()).await;
crate::db::models::TwoFactor::migrate_u2f_to_webauthn(&pool.get().await.unwrap()).await.unwrap(); crate::db::models::TwoFactor::migrate_u2f_to_webauthn(&mut pool.get().await.unwrap()).await.unwrap();
launch_rocket(pool, extra_debug).await // Blocks until program termination. launch_rocket(pool, extra_debug).await // Blocks until program termination.
} }

View File

@ -621,9 +621,9 @@ fn _process_key(key: &str) -> String {
// Retry methods // Retry methods
// //
pub fn retry<F, T, E>(func: F, max_tries: u32) -> Result<T, E> pub fn retry<F, T, E>(mut func: F, max_tries: u32) -> Result<T, E>
where where
F: Fn() -> Result<T, E>, F: FnMut() -> Result<T, E>,
{ {
let mut tries = 0; let mut tries = 0;
@ -642,9 +642,9 @@ where
} }
} }
pub async fn retry_db<F, T, E>(func: F, max_tries: u32) -> Result<T, E> pub async fn retry_db<F, T, E>(mut func: F, max_tries: u32) -> Result<T, E>
where where
F: Fn() -> Result<T, E>, F: FnMut() -> Result<T, E>,
E: std::error::Error, E: std::error::Error,
{ {
let mut tries = 0; let mut tries = 0;
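
The last hunks relax the bounds on the retry helpers from `Fn` to `FnMut` and bind the argument as `mut func`. With diesel 2 the retried closures capture a `&mut` connection and mutate through it, which only an `FnMut` closure may do. A minimal, self-contained illustration of why the looser bound is needed, using a counter in place of the database connection and omitting the sleep/backoff of the real helper:

// Minimal stand-in for the util::retry above: call `func` until it succeeds or
// the attempts run out. `mut func` plus the FnMut bound let the closure mutate
// its captured state on every call.
fn retry<F, T, E>(mut func: F, max_tries: u32) -> Result<T, E>
where
    F: FnMut() -> Result<T, E>,
{
    let mut tries = 0;
    loop {
        match func() {
            ok @ Ok(_) => return ok,
            err @ Err(_) => {
                tries += 1;
                if tries >= max_tries {
                    return err;
                }
            }
        }
    }
}

fn main() {
    let mut attempts = 0; // mutated by the closure, so the closure is FnMut, not Fn
    let result: Result<u32, &str> = retry(
        || {
            attempts += 1;
            if attempts < 3 {
                Err("transient failure")
            } else {
                Ok(attempts)
            }
        },
        10,
    );
    assert_eq!(result, Ok(3));
}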