Merge remote-tracking branch 'origin/master' into fmt

Jake Howard
2021-04-06 21:55:28 +01:00
25 changed files with 326 additions and 147 deletions

View File

@@ -1,7 +1,7 @@
 use once_cell::sync::Lazy;
 use serde::de::DeserializeOwned;
 use serde_json::Value;
-use std::{env, process::Command, time::Duration};
+use std::{env, time::Duration};
 use reqwest::{blocking::Client, header::USER_AGENT};
 use rocket::{
@@ -64,10 +64,8 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| {
         .unwrap_or("Unknown")
 });
-static CAN_BACKUP: Lazy<bool> = Lazy::new(|| {
-    DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false)
-        && Command::new("sqlite3").arg("-version").status().is_ok()
-});
+static CAN_BACKUP: Lazy<bool> =
+    Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));
 #[get("/")]
 fn admin_disabled() -> &'static str {
@@ -503,9 +501,16 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     use std::net::ToSocketAddrs;
     // Get current running versions
-    let vault_version_path = format!("{}/{}", CONFIG.web_vault_folder(), "version.json");
-    let vault_version_str = read_file_string(&vault_version_path)?;
-    let web_vault_version: WebVaultVersion = serde_json::from_str(&vault_version_str)?;
+    let web_vault_version: WebVaultVersion =
+        match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
+            Ok(s) => serde_json::from_str(&s)?,
+            _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
+                Ok(s) => serde_json::from_str(&s)?,
+                _ => WebVaultVersion {
+                    version: String::from("Version file missing"),
+                },
+            },
+        };
     // Execute some environment checks
     let running_within_docker = is_running_in_docker();
@@ -561,9 +566,10 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     let diagnostics_json = json!({
         "dns_resolved": dns_resolved,
-        "web_vault_version": web_vault_version.version,
         "latest_release": latest_release,
         "latest_commit": latest_commit,
         "web_vault_enabled": &CONFIG.web_vault_enabled(),
+        "web_vault_version": web_vault_version.version,
         "latest_web_build": latest_web_build,
         "running_within_docker": running_within_docker,
         "has_http_access": has_http_access,
@@ -575,6 +581,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
         "db_type": *DB_TYPE,
         "db_version": get_sql_server_version(&conn),
         "admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
+        "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
         "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference
     });
@@ -600,11 +607,11 @@ fn delete_config(_token: AdminToken) -> EmptyResult {
 }
 #[post("/config/backup_db")]
-fn backup_db(_token: AdminToken) -> EmptyResult {
+fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult {
     if *CAN_BACKUP {
-        backup_database()
+        backup_database(&conn)
     } else {
-        err!("Can't back up current DB (either it's not SQLite or the 'sqlite' binary is not present)");
+        err!("Can't back up current DB (Only SQLite supports this feature)");
     }
 }
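
Note: the CAN_BACKUP and backup_db changes above drop the external `sqlite3` binary check entirely and hand a pooled DbConn to backup_database. The body of backup_database is not part of this diff; a minimal sketch of how a backup can be done purely through the connection, assuming Diesel and SQLite's VACUUM INTO, might look like:

// Sketch only — backup_database's real body is not shown in this diff.
// Assumes a Diesel SqliteConnection and SQLite >= 3.27 (VACUUM INTO).
use diesel::{sql_query, RunQueryDsl, SqliteConnection};

fn backup_database(conn: &SqliteConnection) -> diesel::QueryResult<()> {
    // Timestamped copy next to the live database; the path layout is an assumption.
    let target = format!("data/db_{}.sqlite3", chrono::Utc::now().format("%Y%m%d_%H%M%S"));
    sql_query(format!("VACUUM INTO '{}'", target)).execute(conn)?;
    Ok(())
}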

View File

@@ -13,7 +13,7 @@ use crate::{
     api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType},
     auth::Headers,
     crypto,
-    db::{models::*, DbConn},
+    db::{models::*, DbConn, DbPool},
     CONFIG,
 };
@@ -77,6 +77,15 @@ pub fn routes() -> Vec<Route> {
     ]
 }
+pub fn purge_trashed_ciphers(pool: DbPool) {
+    debug!("Purging trashed ciphers");
+    if let Ok(conn) = pool.get() {
+        Cipher::purge_trash(&conn);
+    } else {
+        error!("Failed to get DB connection while purging trashed ciphers")
+    }
+}
 #[derive(FromForm, Default)]
 struct SyncData {
     #[form(field = "excludeDomains")]
@@ -118,6 +127,7 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> {
         "Ciphers": ciphers_json,
         "Domains": domains_json,
         "Sends": sends_json,
+        "unofficialServer": true,
         "Object": "sync"
     }))
 }
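
Note: Cipher::purge_trash(&conn), called by the new purge_trashed_ciphers above, is not part of this diff. A hypothetical sketch of the idea (table name, column name, and retention window are all assumptions for illustration):

// Illustrative only — the real Cipher::purge_trash is not shown in this diff.
// Assumes soft-deleted rows keep a `deleted_at` timestamp.
use diesel::{sql_query, RunQueryDsl, SqliteConnection};

fn purge_trash_sketch(conn: &SqliteConnection, retention_days: i64) {
    let stmt = format!(
        "DELETE FROM ciphers WHERE deleted_at IS NOT NULL \
         AND deleted_at < datetime('now', '-{} days')",
        retention_days
    );
    sql_query(stmt).execute(conn).ok();
}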

View File

@@ -5,7 +5,8 @@ mod organizations;
 mod sends;
 pub mod two_factor;
-pub use sends::start_send_deletion_scheduler;
+pub use ciphers::purge_trashed_ciphers;
+pub use sends::purge_sends;
 pub fn routes() -> Vec<Route> {
     let mut mod_routes =

View File

@@ -9,7 +9,7 @@ use serde_json::Value;
 use crate::{
     api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType},
     auth::{Headers, Host},
-    db::{models::*, DbConn},
+    db::{models::*, DbConn, DbPool},
     CONFIG,
 };
@@ -19,21 +19,13 @@ pub fn routes() -> Vec<rocket::Route> {
     routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password]
 }
-pub fn start_send_deletion_scheduler(pool: crate::db::DbPool) {
-    std::thread::spawn(move || {
-        loop {
-            if let Ok(conn) = pool.get() {
-                info!("Initiating send deletion");
-                for send in Send::find_all(&conn) {
-                    if chrono::Utc::now().naive_utc() >= send.deletion_date {
-                        send.delete(&conn).ok();
-                    }
-                }
-            }
-            std::thread::sleep(std::time::Duration::from_secs(3600));
-        }
-    });
+pub fn purge_sends(pool: DbPool) {
+    debug!("Purging sends");
+    if let Ok(conn) = pool.get() {
+        Send::purge(&conn);
+    } else {
+        error!("Failed to get DB connection while purging sends")
+    }
 }
 #[derive(Deserialize)]
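
Note: with start_send_deletion_scheduler gone, this file no longer spawns its own background thread; purge_sends (like purge_trashed_ciphers) is a plain function meant to be driven by a periodic job elsewhere. That wiring is not shown in this diff, but a minimal hypothetical caller could look like:

// Hypothetical caller — the actual scheduler wiring is outside this diff.
use std::{thread, time::Duration};

fn spawn_purge_jobs(pool: crate::db::DbPool) {
    thread::spawn(move || loop {
        crate::api::purge_sends(pool.clone());
        crate::api::purge_trashed_ciphers(pool.clone());
        thread::sleep(Duration::from_secs(3600)); // hourly interval is an assumption
    });
}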

View File

@@ -42,6 +42,7 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
 // Build Regex only once since this takes a lot of time.
 static ICON_REL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)icon$|apple.*icon").unwrap());
+static ICON_REL_BLACKLIST: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)mask-icon").unwrap());
 static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
 // Special HashMap which holds the user defined Regex to speedup matching the regex.
@@ -60,7 +61,9 @@ fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
     }
     match get_icon(&domain) {
-        Some(i) => Cached::ttl(Content(ContentType::new("image", "x-icon"), i), CONFIG.icon_cache_ttl()),
+        Some((icon, icon_type)) => {
+            Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl())
+        }
         _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()),
     }
 }
@@ -254,7 +257,7 @@ fn is_domain_blacklisted(domain: &str) -> bool {
     is_blacklisted
 }
-fn get_icon(domain: &str) -> Option<Vec<u8>> {
+fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
     let path = format!("{}/{}.png", CONFIG.icon_cache_folder(), domain);
     // Check for expiration of negatively cached copy
@@ -263,7 +266,11 @@ fn get_icon(domain: &str) -> Option<Vec<u8>> {
     }
     if let Some(icon) = get_cached_icon(&path) {
-        return Some(icon);
+        let icon_type = match get_icon_type(&icon) {
+            Some(x) => x,
+            _ => "x-icon",
+        };
+        return Some((icon, icon_type.to_string()));
     }
     if CONFIG.disable_icon_download() {
@@ -272,9 +279,9 @@ fn get_icon(domain: &str) -> Option<Vec<u8>> {
     // Get the icon, or None in case of error
     match download_icon(&domain) {
-        Ok(icon) => {
+        Ok((icon, icon_type)) => {
             save_icon(&path, &icon);
-            Some(icon)
+            Some((icon, icon_type.unwrap_or("x-icon").to_string()))
         }
         Err(e) => {
             error!("Error downloading icon: {:?}", e);
@@ -335,7 +342,6 @@ fn icon_is_expired(path: &str) -> bool {
     expired.unwrap_or(true)
 }
-#[derive(Debug)]
 struct Icon {
     priority: u8,
     href: String,
@@ -367,7 +373,8 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve
                 let attr_name = attr.name.local.as_ref();
                 let attr_value = attr.value.as_ref();
-                if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) {
+                if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value)
+                {
                     has_rel = true;
                 } else if attr_name == "href" {
                     href = Some(attr_value);
@@ -616,7 +623,7 @@ fn parse_sizes(sizes: Option<&str>) -> (u16, u16) {
     (width, height)
 }
-fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
+fn download_icon(domain: &str) -> Result<(Vec<u8>, Option<&str>), Error> {
     if is_domain_blacklisted(domain) {
         err!("Domain is blacklisted", domain)
     }
@@ -624,6 +631,7 @@ fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
     let icon_result = get_icon_url(&domain)?;
     let mut buffer = Vec::new();
+    let mut icon_type: Option<&str> = None;
     use data_url::DataUrl;
@@ -635,17 +643,31 @@
                 Ok((body, _fragment)) => {
                     // Also check if the size is atleast 67 bytes, which seems to be the smallest png i could create
                     if body.len() >= 67 {
+                        // Check if the icon type is allowed, else try an icon from the list.
+                        icon_type = get_icon_type(&body);
+                        if icon_type.is_none() {
+                            debug!("Icon from {} data:image uri, is not a valid image type", domain);
+                            continue;
+                        }
+                        info!("Extracted icon from data:image uri for {}", domain);
                         buffer = body;
                         break;
                     }
                 }
-                _ => warn!("data uri is invalid"),
+                _ => warn!("Extracted icon from data:image uri is invalid"),
             };
         } else {
             match get_page_with_cookies(&icon.href, &icon_result.cookies, &icon_result.referer) {
                 Ok(mut res) => {
-                    info!("Downloaded icon from {}", icon.href);
                     res.copy_to(&mut buffer)?;
+                    // Check if the icon type is allowed, else try an icon from the list.
+                    icon_type = get_icon_type(&buffer);
+                    if icon_type.is_none() {
+                        buffer.clear();
+                        debug!("Icon from {}, is not a valid image type", icon.href);
+                        continue;
+                    }
+                    info!("Downloaded icon from {}", icon.href);
                     break;
                 }
                 _ => warn!("Download failed for {}", icon.href),
@@ -654,10 +676,10 @@ fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
     }
     if buffer.is_empty() {
-        err!("Empty response")
+        err!("Empty response downloading icon")
     }
-    Ok(buffer)
+    Ok((buffer, icon_type))
 }
 fn save_icon(path: &str, icon: &[u8]) {
@@ -669,7 +691,18 @@ fn save_icon(path: &str, icon: &[u8]) {
             create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache");
         }
         Err(e) => {
-            info!("Icon save error: {:?}", e);
+            warn!("Icon save error: {:?}", e);
         }
     }
 }
+fn get_icon_type(bytes: &[u8]) -> Option<&'static str> {
+    match bytes {
+        [137, 80, 78, 71, ..] => Some("png"),
+        [0, 0, 1, 0, ..] => Some("x-icon"),
+        [82, 73, 70, 70, ..] => Some("webp"),
+        [255, 216, 255, ..] => Some("jpeg"),
+        [66, 77, ..] => Some("bmp"),
+        _ => None,
+    }
+}
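
Note: the new get_icon_type sniffs the image format from the leading magic bytes (PNG, ICO, WebP's RIFF header, JPEG, BMP) so the Content-Type served by the icon route matches what was actually cached. A quick illustrative test, not part of the commit:

#[test]
fn sniffs_image_type_from_magic_bytes() {
    // PNG files start with 0x89 'P' 'N' 'G'.
    assert_eq!(get_icon_type(&[137, 80, 78, 71, 13, 10, 26, 10]), Some("png"));
    // JPEG files start with 0xFF 0xD8 0xFF.
    assert_eq!(get_icon_type(&[255, 216, 255, 224]), Some("jpeg"));
    // Anything unrecognised yields None; callers then fall back to "x-icon".
    assert_eq!(get_icon_type(&[1, 2, 3, 4]), None);
}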

View File

@@ -72,7 +72,8 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
         "Kdf": user.client_kdf_type,
         "KdfIterations": user.client_kdf_iter,
         "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
-        "scope": "api offline_access"
+        "scope": "api offline_access",
+        "unofficialServer": true,
     })))
 }
@@ -163,7 +164,8 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
         "Kdf": user.client_kdf_type,
         "KdfIterations": user.client_kdf_iter,
         "ResetMasterPassword": false, // TODO: Same as above
-        "scope": "api offline_access"
+        "scope": "api offline_access",
+        "unofficialServer": true,
     });
     if let Some(token) = twofactor_token {
View File

@@ -10,8 +10,9 @@ use serde_json::Value;
 pub use crate::api::{
     admin::routes as admin_routes,
+    core::purge_sends,
+    core::purge_trashed_ciphers,
     core::routes as core_routes,
-    core::start_send_deletion_scheduler,
     icons::routes as icons_routes,
     identity::routes as identity_routes,
     notifications::routes as notifications_routes,