//
// Web Headers and caching
//
use std::io::Cursor;

use rocket::{
    fairing::{Fairing, Info, Kind},
    http::{ContentType, Header, HeaderMap, Method, Status},
    response::{self, Responder},
    Data, Request, Response, Rocket,
};

use crate::CONFIG;

pub struct AppHeaders();

impl Fairing for AppHeaders {
    fn info(&self) -> Info {
        Info {
            name: "Application Headers",
            kind: Kind::Response,
        }
    }

    fn on_response(&self, _req: &Request, res: &mut Response) {
        res.set_raw_header("Feature-Policy", "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; camera 'none'; encrypted-media 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; payment 'none'; picture-in-picture 'none'; sync-xhr 'self' https://haveibeenpwned.com https://2fa.directory; usb 'none'; vr 'none'");
        res.set_raw_header("Referrer-Policy", "same-origin");
        res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
        res.set_raw_header("X-Content-Type-Options", "nosniff");
        res.set_raw_header("X-XSS-Protection", "1; mode=block");
        let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors());
        res.set_raw_header("Content-Security-Policy", csp);

        // Disable cache unless otherwise specified
        if !res.headers().contains("cache-control") {
            res.set_raw_header("Cache-Control", "no-cache, no-store, max-age=0");
        }
    }
}

pub struct CORS();

impl CORS {
    fn get_header(headers: &HeaderMap, name: &str) -> String {
        match headers.get_one(name) {
            Some(h) => h.to_string(),
            _ => "".to_string(),
        }
    }

    fn valid_url(url: String) -> String {
        match url.as_ref() {
            "file://" => "*".to_string(),
            _ => url,
        }
    }
}

impl Fairing for CORS {
    fn info(&self) -> Info {
        Info {
            name: "CORS",
            kind: Kind::Response,
        }
    }

    fn on_response(&self, request: &Request, response: &mut Response) {
        let req_headers = request.headers();

        // We need to explicitly get the Origin header for Access-Control-Allow-Origin
        let req_allow_origin = CORS::valid_url(CORS::get_header(req_headers, "Origin"));

        response.set_header(Header::new("Access-Control-Allow-Origin", req_allow_origin));

        if request.method() == Method::Options {
            let req_allow_headers = CORS::get_header(req_headers, "Access-Control-Request-Headers");
            let req_allow_method = CORS::get_header(req_headers, "Access-Control-Request-Method");

            response.set_header(Header::new("Access-Control-Allow-Methods", req_allow_method));
            response.set_header(Header::new("Access-Control-Allow-Headers", req_allow_headers));
            response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
            response.set_status(Status::Ok);
            response.set_header(ContentType::Plain);
            response.set_sized_body(Cursor::new(""));
        }
    }
}

pub struct Cached<R>(R, &'static str);

impl<R> Cached<R> {
    pub const fn long(r: R) -> Cached<R> {
        // 7 days
        Self(r, "public, max-age=604800")
    }

    pub const fn short(r: R) -> Cached<R> {
        // 10 minutes
        Self(r, "public, max-age=600")
    }
}

impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
    fn respond_to(self, req: &Request) -> response::Result<'r> {
        match self.0.respond_to(req) {
            Ok(mut res) => {
                res.set_raw_header("Cache-Control", self.1);
                Ok(res)
            }
            e @ Err(_) => e,
        }
    }
}

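// Illustrative usage (hypothetical route, not defined in this module): any
// `Responder` can be wrapped to get a long-lived `Cache-Control` header, e.g.
//
//     #[get("/example-asset.png")]
//     fn example_asset() -> Cached<Content<Vec<u8>>> {
//         Cached::long(Content(ContentType::PNG, read_file("data/example-asset.png").unwrap_or_default()))
//     }
//
// `Cached::short` works the same way for responses that should expire quickly.
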
// Log all the routes from the main paths list, and the attachments endpoint
// Effectively ignores any static file route, and the alive endpoint
const LOGGED_ROUTES: [&str; 6] = [
    "/api",
    "/admin",
    "/identity",
    "/icons",
    "/notifications/hub/negotiate",
    "/attachments",
];

// Boolean is extra debug; when true, we ignore the whitelist above and also print the mounts
pub struct BetterLogging(pub bool);

impl Fairing for BetterLogging {
    fn info(&self) -> Info {
        Info {
            name: "Better Logging",
            kind: Kind::Launch | Kind::Request | Kind::Response,
        }
    }

    fn on_launch(&self, rocket: &Rocket) {
        if self.0 {
            info!(target: "routes", "Routes loaded:");
            let mut routes: Vec<_> = rocket.routes().collect();
            routes.sort_by_key(|r| r.uri.path());
            for route in routes {
                if route.rank < 0 {
                    info!(target: "routes", "{:<6} {}", route.method, route.uri);
                } else {
                    info!(target: "routes", "{:<6} {} [{}]", route.method, route.uri, route.rank);
                }
            }
        }

        let config = rocket.config();
        let scheme = if config.tls_enabled() { "https" } else { "http" };
        let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
        info!(target: "start", "Rocket has launched from {}", addr);
    }

    fn on_request(&self, request: &mut Request<'_>, _data: &Data) {
        let method = request.method();
        if !self.0 && method == Method::Options {
            return;
        }
        let uri = request.uri();
        let uri_path = uri.path();
        let uri_subpath = uri_path.strip_prefix(&CONFIG.domain_path()).unwrap_or(uri_path);
        if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
            match uri.query() {
                Some(q) => info!(target: "request", "{} {}?{}", method, uri_path, &q[..q.len().min(30)]),
                None => info!(target: "request", "{} {}", method, uri_path),
            };
        }
    }

    fn on_response(&self, request: &Request, response: &mut Response) {
        if !self.0 && request.method() == Method::Options {
            return;
        }
        let uri_path = request.uri().path();
        let uri_subpath = uri_path.strip_prefix(&CONFIG.domain_path()).unwrap_or(uri_path);
        if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
            let status = response.status();
            if let Some(route) = request.route() {
                info!(target: "response", "{} => {} {}", route, status.code, status.reason)
            } else {
                info!(target: "response", "{} {}", status.code, status.reason)
            }
        }
    }
}

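// Illustrative wiring (a sketch, not a verbatim call site): the fairings above are
// attached to the Rocket instance at startup. Under Rocket 0.4 that looks roughly like
//
//     rocket::ignite()
//         .attach(AppHeaders())
//         .attach(CORS())
//         .attach(BetterLogging(extra_debug))
//         .launch();
//
// where `extra_debug` is a boolean flag supplied by the caller.
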
//
// File handling
//
use std::{
    fs::{self, File},
    io::{Read, Result as IOResult},
    path::Path,
};

pub fn file_exists(path: &str) -> bool {
    Path::new(path).exists()
}

pub fn read_file(path: &str) -> IOResult<Vec<u8>> {
    let mut contents: Vec<u8> = Vec::new();

    let mut file = File::open(Path::new(path))?;
    file.read_to_end(&mut contents)?;

    Ok(contents)
}

pub fn read_file_string(path: &str) -> IOResult<String> {
    let mut contents = String::new();

    let mut file = File::open(Path::new(path))?;
    file.read_to_string(&mut contents)?;

    Ok(contents)
}

pub fn delete_file(path: &str) -> IOResult<()> {
    let res = fs::remove_file(path);

    if let Some(parent) = Path::new(path).parent() {
        // If the directory isn't empty, this returns an error, which we ignore
        // We only want to delete the folder if it's empty
        fs::remove_dir(parent).ok();
    }

    res
}

const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"];

pub fn get_display_size(size: i32) -> String {
    let mut size: f64 = size.into();
    let mut unit_counter = 0;

    loop {
        if size > 1024. {
            size /= 1024.;
            unit_counter += 1;
        } else {
            break;
        }
    }

    format!("{:.2} {}", size, UNITS[unit_counter])
}

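// Worked example (illustrative values): `get_display_size(1_500_000)` divides by 1024
// twice and yields "1.43 MB", while `get_display_size(100)` never enters the loop and
// yields "100.00 bytes".
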
pub fn get_uuid() -> String {
    uuid::Uuid::new_v4().to_string()
}

//
// String util methods
//

use std::str::FromStr;

pub fn upcase_first(s: &str) -> String {
    let mut c = s.chars();
    match c.next() {
        None => String::new(),
        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
    }
}

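// For example, `upcase_first("cipher")` returns "Cipher" and `upcase_first("")` returns
// an empty string; only the first character is affected.
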
pub fn try_parse_string<S, T>(string: Option<S>) -> Option<T>
where
    S: AsRef<str>,
    T: FromStr,
{
    if let Some(Ok(value)) = string.map(|s| s.as_ref().parse::<T>()) {
        Some(value)
    } else {
        None
    }
}

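// Illustrative behaviour: `try_parse_string::<_, u16>(Some("8080"))` is `Some(8080)`,
// while a value that fails `FromStr` (e.g. `Some("not-a-port")`) or a `None` input both
// come back as `None`.
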
//
// Env methods
//

use std::env;

pub fn get_env_str_value(key: &str) -> Option<String> {
    let key_file = format!("{}_FILE", key);
    let value_from_env = env::var(key);
    let value_file = env::var(&key_file);

    match (value_from_env, value_file) {
        (Ok(_), Ok(_)) => panic!("You should not define both {} and {}!", key, key_file),
        (Ok(v_env), Err(_)) => Some(v_env),
        (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) {
            Ok(content) => Some(content.trim().to_string()),
            Err(e) => panic!("Failed to load {}: {:?}", key, e),
        },
        _ => None,
    }
}

pub fn get_env<V>(key: &str) -> Option<V>
where
    V: FromStr,
{
    try_parse_string(get_env_str_value(key))
}

const TRUE_VALUES: &[&str] = &["true", "t", "yes", "y", "1"];
const FALSE_VALUES: &[&str] = &["false", "f", "no", "n", "0"];

pub fn get_env_bool(key: &str) -> Option<bool> {
    match get_env_str_value(key) {
        Some(val) if TRUE_VALUES.contains(&val.to_lowercase().as_ref()) => Some(true),
        Some(val) if FALSE_VALUES.contains(&val.to_lowercase().as_ref()) => Some(false),
        _ => None,
    }
}

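// Illustrative lookups (the variable names here are assumptions for the example):
//
//     let port: Option<u16> = get_env("ROCKET_PORT");   // "8000" parses to Some(8000)
//     let debug = get_env_bool("EXTENDED_LOGGING");      // "true"/"1"/"yes" => Some(true)
//
// Each helper also honours the `<KEY>_FILE` convention from `get_env_str_value`, so
// e.g. `ROCKET_PORT_FILE=/run/secrets/port` reads the value from that file instead.
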
//
// Date util methods
//

use chrono::{DateTime, Local, NaiveDateTime, TimeZone};
use chrono_tz::Tz;

/// Formats a UTC-offset `NaiveDateTime` in the format used by Bitwarden API
/// responses with "date" fields (`CreationDate`, `RevisionDate`, etc.).
pub fn format_date(dt: &NaiveDateTime) -> String {
    dt.format("%Y-%m-%dT%H:%M:%S%.6fZ").to_string()
}

/// Formats a `DateTime<Local>` using the specified format string.
///
/// For a `DateTime<Local>`, the `%Z` specifier normally formats as the
/// time zone's UTC offset (e.g., `+00:00`). In this function, if the
/// `TZ` environment variable is set, then `%Z` instead formats as the
/// abbreviation for that time zone (e.g., `UTC`).
pub fn format_datetime_local(dt: &DateTime<Local>, fmt: &str) -> String {
    // Try parsing the `TZ` environment variable to enable formatting `%Z` as
    // a time zone abbreviation.
    if let Ok(tz) = env::var("TZ") {
        if let Ok(tz) = tz.parse::<Tz>() {
            return dt.with_timezone(&tz).format(fmt).to_string();
        }
    }

    // Otherwise, fall back to formatting `%Z` as a UTC offset.
    dt.format(fmt).to_string()
}

/// Formats a UTC-offset `NaiveDateTime` as a datetime in the local time zone.
///
/// This function basically converts the `NaiveDateTime` to a `DateTime<Local>`,
/// and then calls [format_datetime_local](crate::util::format_datetime_local).
pub fn format_naive_datetime_local(dt: &NaiveDateTime, fmt: &str) -> String {
    format_datetime_local(&Local.from_utc_datetime(dt), fmt)
}

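// Worked example: a `NaiveDateTime` for 2021-02-27 03:40:12 passed through
// `format_date` comes out as "2021-02-27T03:40:12.000000Z" (the `%.6f` specifier always
// prints six fractional digits), which is the shape used for `RevisionDate`-style fields.
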
//
// Deployment environment methods
//

/// Returns true if the program is running in Docker or Podman.
pub fn is_running_in_docker() -> bool {
    Path::new("/.dockerenv").exists() || Path::new("/run/.containerenv").exists()
}

//
// Deserialization methods
//

use std::fmt;

use serde::de::{self, DeserializeOwned, Deserializer, MapAccess, SeqAccess, Visitor};
use serde_json::{self, Value};

pub type JsonMap = serde_json::Map<String, Value>;

#[derive(PartialEq, Serialize, Deserialize)]
pub struct UpCase<T: DeserializeOwned> {
    #[serde(deserialize_with = "upcase_deserialize")]
    #[serde(flatten)]
    pub data: T,
}

// https://github.com/serde-rs/serde/issues/586
pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
    T: DeserializeOwned,
    D: Deserializer<'de>,
{
    let d = deserializer.deserialize_any(UpCaseVisitor)?;
    T::deserialize(d).map_err(de::Error::custom)
}

struct UpCaseVisitor;

impl<'de> Visitor<'de> for UpCaseVisitor {
    type Value = Value;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an object or an array")
    }

    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        let mut result_map = JsonMap::new();

        while let Some((key, value)) = map.next_entry()? {
            result_map.insert(upcase_first(key), upcase_value(value));
        }

        Ok(Value::Object(result_map))
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        let mut result_seq = Vec::<Value>::new();

        while let Some(value) = seq.next_element()? {
            result_seq.push(upcase_value(value));
        }

        Ok(Value::Array(result_seq))
    }
}

fn upcase_value(value: Value) -> Value {
    if let Value::Object(map) = value {
        let mut new_value = json!({});

        for (key, val) in map.into_iter() {
            let processed_key = _process_key(&key);
            new_value[processed_key] = upcase_value(val);
        }
        new_value
    } else if let Value::Array(array) = value {
        // Initialize array with null values
        let mut new_value = json!(vec![Value::Null; array.len()]);

        for (index, val) in array.into_iter().enumerate() {
            new_value[index] = upcase_value(val);
        }
        new_value
    } else {
        value
    }
}

fn _process_key(key: &str) -> String {
    match key.to_lowercase().as_ref() {
        "ssn" => "SSN".into(),
        _ => self::upcase_first(key),
    }
}

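// Sketch of the round trip (the field names below are made up for the example):
// deserializing
//
//     {"name": "item", "ssn": "123", "tags": [{"id": 1}]}
//
// through `UpCase<T>` first rewrites the keys to `Name`, `SSN` and `Tags` (recursing
// into nested objects and arrays via `upcase_value`), and only then hands the value to
// `T::deserialize`, so `T` can be declared with the PascalCase field names the
// Bitwarden API uses.
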
//
// Retry methods
//

pub fn retry<F, T, E>(func: F, max_tries: u32) -> Result<T, E>
where
    F: Fn() -> Result<T, E>,
{
    use std::{thread::sleep, time::Duration};
    let mut tries = 0;

    loop {
        match func() {
            ok @ Ok(_) => return ok,
            err @ Err(_) => {
                tries += 1;

                if tries >= max_tries {
                    return err;
                }

                sleep(Duration::from_millis(500));
            }
        }
    }
}

pub fn retry_db<F, T, E>(func: F, max_tries: u32) -> Result<T, E>
where
    F: Fn() -> Result<T, E>,
    E: std::error::Error,
{
    use std::{thread::sleep, time::Duration};
    let mut tries = 0;

    loop {
        match func() {
            ok @ Ok(_) => return ok,
            Err(e) => {
                tries += 1;

                if tries >= max_tries && max_tries > 0 {
                    return Err(e);
                }

                warn!("Can't connect to database, retrying: {:?}", e);

                sleep(Duration::from_millis(1_000));
            }
        }
    }
}
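
// Typical call shape (the closures and retry counts here are illustrative, not taken
// from a real call site): both helpers simply re-run the closure until it succeeds or
// the attempt budget runs out.
//
//     let result = retry(|| send_notification(&payload), 10);
//     let conn = retry_db(|| pool.get(), 15);
//
// `retry` waits 500 ms between attempts; `retry_db` waits 1 s, logs a warning per
// failure, and with `max_tries == 0` keeps retrying indefinitely.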